Dataset schema (one record per source file). Ranges give the observed minimum and maximum; ⌀ marks columns that may be null:

| column | dtype | observed range / cardinality |
|---|---|---|
| blob_id | string | length 40–40 |
| directory_id | string | length 40–40 |
| path | string | length 3–616 |
| content_id | string | length 40–40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40–40 |
| revision_id | string | length 40–40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M ⌀ |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 ⌀ |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 ⌀ |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | length 3 – 10.2M |
| authors | list | length 1–1 |
| author_id | string | length 1–132 |
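The records below follow this schema as raw pipe-delimited rows, with multi-line `content` cells reproduced verbatim. For programmatic access, a minimal sketch using the Hugging Face `datasets` library; the dataset id `org/python-source-files` is a placeholder, not the real Hub path:

```python
from datasets import load_dataset

# Placeholder id; substitute the dataset's actual Hub path.
ds = load_dataset("org/python-source-files", split="train", streaming=True)

for row in ds.take(1):
    # Each record pairs repository provenance with the full source text.
    print(row["repo_name"], row["path"], row["license_type"])
    print(row["content"][:200])
```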
f23e5e2eabc0ea5604a62feb24f8d24c53096630 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_exercises/templates/_algorithms_challenges/algorithm-master/lintcode/689_two_sum_bst_edtion.py | c920ea03fbd1007257c3c49c68706b3c9a0ed00e | []
| no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 1,682 | py | """
Definition of TreeNode:
class TreeNode:
def __init__(self, val):
self.val = val
self.left, self.right = None, None
"""
class Solution:
    """
    @param root: the root of tree
    @param n: the target sum
    @return: two numbers from tree whose sum is n
    """
    def twoSum(self, root, n):
        left = right = None
        head = tail = root

        def pre():
            # Step `right` one node backward along the reverse in-order
            # sequence, threading the tree Morris-style as it goes.
            nonlocal right, tail
            while tail:
                cur = tail.right
                if cur and cur is not right:
                    while cur.left and cur.left is not tail:
                        cur = cur.left
                    if cur.left is tail:
                        right = tail
                        cur.left = None
                        tail = tail.left
                        break
                    else:
                        cur.left = tail
                        tail = tail.right
                else:
                    right = tail
                    tail = tail.left
                    break

        def nxt():
            # Step `left` one node forward along the in-order sequence,
            # threading the tree Morris-style as it goes.
            nonlocal left, head
            while head:
                cur = head.left
                if cur and cur is not left:
                    while cur.right and cur.right is not head:
                        cur = cur.right
                    if cur.right is head:
                        left = head
                        cur.right = None
                        head = head.right
                        break
                    else:
                        cur.right = head
                        head = head.left
                else:
                    left = head
                    head = head.right
                    break

        # Two pointers converge over the sorted (in-order) value sequence.
        pre()
        nxt()
        while left is not right:
            _sum = left.val + right.val
            if _sum == n:
                return [left.val, right.val]
            if _sum < n:
                nxt()
            else:
                pre()
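# A minimal usage sketch, assuming the TreeNode definition quoted in the
# header docstring; the tree and target below are illustrative only.
if __name__ == '__main__':
    class TreeNode:
        def __init__(self, val):
            self.val = val
            self.left, self.right = None, None

    root = TreeNode(4)
    root.left = TreeNode(2)
    root.left.left = TreeNode(1)
    root.left.right = TreeNode(3)
    print(Solution().twoSum(root, 5))  # -> [1, 4]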
| [
"[email protected]"
]
| |
cdacac9398ca97d329f4e8333510df3edf223077 | 165e706d485e90f4e4f63cfb9f2c35acda14cfc0 | /property_linking/scripts/preprocessing/closure_inference.py | 7f0f19a43eb2ba1d71b8eecc9798247327ab538f | [
"Apache-2.0"
]
| permissive | Tarkiyah/googleResearch | 65581f3bbbe2ffe248c9e613c0ea7eac336d5372 | dea327aa9e7ef7f7bca5a6c225dbdca1077a06e9 | refs/heads/master | 2022-12-07T12:04:44.153221 | 2019-11-21T16:03:48 | 2019-11-21T16:18:28 | 223,229,888 | 11 | 2 | Apache-2.0 | 2022-11-21T21:39:10 | 2019-11-21T17:38:31 | Jupyter Notebook | UTF-8 | Python | false | false | 3,537 | py | # coding=utf-8
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Find superclasses.
Examples:
closure_inference.py --sling_kb_file <kb> --alsologtostderr
# for locations
closure_inference.py --sling_kb_file <kb> --alsologtostderr
--infile <infile> --closing_rel_id P131
"""
import time
import sling
import tensorflow as tf
FLAGS = tf.flags.FLAGS
tf.flags.DEFINE_string(
'outfile', '/tmp/closed.tsv', 'created file')
tf.flags.DEFINE_string(
'infile',
'',
'input file')
# probably P131 = administrative region of, or P279 = subclass_of
tf.flags.DEFINE_string(
'closing_rel_id', 'P279', 'relation to use to close')
tf.flags.DEFINE_string(
'sling_kb_file',
'',
'where to find sling kb')
tf.flags.DEFINE_string(
'blacklist_substring',
'metaclass',
'discard superclasses with this substring in the name')
tf.flags.DEFINE_boolean(
'trace_closures',
False,
'give ridiculously long debug output')
def load_kb():
"""Load self.names and self.kb.
Returns:
sling kb
"""
tf.logging.info('loading and indexing kb...')
start = time.time()
kb = sling.Store()
kb.load(FLAGS.sling_kb_file)
kb.freeze()
tf.logging.info('loading took %.3f sec' % (time.time() - start))
return kb
def closure(kb, closing_relation_id, cat_id):
"""Return set of ids for logical closure of a category/region.
Args:
kb: a sling kb
closing_relation_id: SUBCLASS_OF_ID or REGION_OF_ID
cat_id: id of the category to find ancestors of
Returns:
the set of all things in the KB of which the category with id cat_id
is a subclass.
"""
result = set()
closer = kb[closing_relation_id]
_collect_ancestors(kb, result, closer, cat_id)
def blacklisted(qid):
name = kb[qid].name
return name and name.find(FLAGS.blacklist_substring) >= 0
if FLAGS.blacklist_substring:
return [e for e in result if not blacklisted(e)]
else:
return result
def _collect_ancestors(kb, buf, closer, cat_id):
if cat_id not in buf:
buf.add(cat_id)
for key, val in kb[cat_id]:
if key == closer and val.id:
_collect_ancestors(kb, buf, closer, val.id)
def main(_):
tf.logging.set_verbosity(tf.logging.INFO)
kb = load_kb()
tf.logging.info('will write to %s*.tsv' % FLAGS.outfile)
tf.logging.info('closing with %s [%s]' % (
kb[FLAGS.closing_rel_id].name,
kb[FLAGS.closing_rel_id]))
with tf.gfile.Open(FLAGS.outfile, 'w') as out_fp:
for line in tf.gfile.GFile(FLAGS.infile):
qid = line.strip()
if qid.startswith('i/'):
qid = qid[len('i/'):]
closed = closure(kb, FLAGS.closing_rel_id, qid)
out_fp.write('\t'.join([qid] + list(closed)) + '\n')
if FLAGS.trace_closures:
if len(closed) > 1:
tf.logging.info('closing %s [%s]' % (kb[qid].name, qid))
for super_qid in closed:
tf.logging.info(' ==> %s [%s]' % (kb[super_qid].name, super_qid))
if __name__ == '__main__':
tf.app.run()
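# The recursion in _collect_ancestors, restated as a standalone sketch with no
# sling dependency: an iterative depth-first walk over a plain mapping from id
# to parent ids. The sample QIDs are made up for illustration.
def _closure_sketch(parents, start):
    seen, stack = set(), [start]
    while stack:
        node = stack.pop()
        if node not in seen:
            seen.add(node)
            stack.extend(parents.get(node, ()))
    return seen
# _closure_sketch({'Q1': ['Q2'], 'Q2': ['Q3']}, 'Q1') -> {'Q1', 'Q2', 'Q3'}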
| [
"[email protected]"
]
| |
746c37e2bd1f8cca597462e416e26a4cd58a179a | a7319d1c462618445b13cb8dde5c30173801b745 | /backend/hardik_25509/settings.py | 7596bd63123791db28563da0c6685b368b3fe30e | []
| no_license | crowdbotics-apps/hardik-25509 | 357e87239d5379d4b6c79d746659d8a20942be0b | f179cee72ca96a9115c3aeb46e3d5d4a3abec27a | refs/heads/master | 2023-04-01T19:47:21.352217 | 2021-04-08T05:57:57 | 2021-04-08T05:57:57 | 355,501,693 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,128 | py | """
Django settings for hardik_25509 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.sites",
"course",
]
LOCAL_APPS = [
"home",
"modules",
"users.apps.UsersConfig",
]
THIRD_PARTY_APPS = [
"rest_framework",
"rest_framework.authtoken",
"rest_auth",
"rest_auth.registration",
"bootstrap4",
"allauth",
"allauth.account",
"allauth.socialaccount",
"allauth.socialaccount.providers.google",
"django_extensions",
"drf_yasg",
"storages",
# start fcm_django push notifications
"fcm_django",
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "hardik_25509.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(BASE_DIR, "web_build")],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "hardik_25509.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.path.join(BASE_DIR, "db.sqlite3"),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {"default": env.db()}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = "/static/"
MIDDLEWARE += ["whitenoise.middleware.WhiteNoiseMiddleware"]
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
os.path.join(BASE_DIR, "web_build/static"),
]
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
USE_S3 = (
AWS_ACCESS_KEY_ID
and AWS_SECRET_ACCESS_KEY
and AWS_STORAGE_BUCKET_NAME
and AWS_STORAGE_REGION
)
if USE_S3:
AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
DEFAULT_FILE_STORAGE = env.str(
"DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
)
MEDIA_URL = "/mediafiles/"
MEDIA_ROOT = os.path.join(BASE_DIR, "mediafiles")
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning(
"You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails."
)
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
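# A small sketch of how the env() lookups above resolve, assuming standard
# django-environ semantics: a set environment variable wins, otherwise the
# supplied default is returned. Variable names here are illustrative.
if __name__ == "__main__":
    _probe = environ.Env()
    os.environ["DEBUG"] = "True"
    assert _probe.bool("DEBUG", default=False) is True          # env var wins
    assert _probe.str("NO_SUCH_VAR", "fallback") == "fallback"  # default used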
| [
"[email protected]"
]
| |
d0a830d0a0726d6af0ac47e9a69790f3b54dadef | 552556631580799b16d0fb31e8f10850383ef3b2 | /ex3/outputs/cactusADM/cactusADM.DW_01-WS_384.out/info.py | 348937e8858f7dde9a12f4272870b9a72a0e1711 | []
| no_license | gregth/NTUA-advcomparch | f19ee414f8b77f749a09f263feb980350f88880d | bc501f427ddf1423f851ce1a052dc335183c5103 | refs/heads/master | 2022-11-14T20:11:49.035503 | 2020-06-27T09:17:43 | 2020-06-27T09:17:43 | 262,262,423 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 18,949 | py | power = {'BUSES': {'Area': 0.482695,
'Bus/Area': 0.482695,
'Bus/Gate Leakage': 0.00516603,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0538708,
'Bus/Subthreshold Leakage with power gating': 0.0202016,
'Gate Leakage': 0.00516603,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0538708,
'Subthreshold Leakage with power gating': 0.0202016},
'Core': [{'Area': 28.4239,
'Execution Unit/Area': 5.53218,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.172838,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.338443,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.04707,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.764653,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.0899492,
'Execution Unit/Instruction Scheduler/Area': 0.470135,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.344008,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00116949,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.19423,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.330858,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0174957,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00996061,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00190238,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.0412792,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.000272929,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 0.12833,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.146306,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.00407561,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.00231116,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 1.52324,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.084848,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000459962,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 0.200684,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.232419,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0126317,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.0065308,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.709584,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.034203,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0188026,
'Execution Unit/Integer ALUs/Area': 0.0784784,
'Execution Unit/Integer ALUs/Gate Leakage': 0.00442152,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.204417,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.377086,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.0670367,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.0251388,
'Execution Unit/Peak Dynamic': 3.40316,
'Execution Unit/Register Files/Area': 0.0522536,
'Execution Unit/Register Files/Floating Point RF/Area': 0.0288818,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 5.5436e-05,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0224588,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00546953,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00105965,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.000506457,
'Execution Unit/Register Files/Gate Leakage': 9.6217e-05,
'Execution Unit/Register Files/Integer RF/Area': 0.0233718,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 4.0781e-05,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0281522,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0271253,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.000709081,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.000314572,
'Execution Unit/Register Files/Peak Dynamic': 0.0506109,
'Execution Unit/Register Files/Runtime Dynamic': 0.0325949,
'Execution Unit/Register Files/Subthreshold Leakage': 0.00176873,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.000821028,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0255052,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.0037757,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.368845,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.606019,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0572452,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0214669,
'Execution Unit/Runtime Dynamic': 2.82838,
'Execution Unit/Subthreshold Leakage': 1.36145,
'Execution Unit/Subthreshold Leakage with power gating': 0.516679,
'Gate Leakage': 0.331603,
'Instruction Fetch Unit/Area': 4.41231,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 1.88835e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 1.88835e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 1.63189e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 6.24694e-06,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.0012795,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00133359,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00018565,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0565546,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0015406,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 1.59297e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.00823559,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.00697245,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.000228727,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000111376,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.44653,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.243577,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 0.464496,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 0.343509,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.290824,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.10664,
'Instruction Fetch Unit/Runtime Dynamic': 0.542892,
'Instruction Fetch Unit/Subthreshold Leakage': 0.916592,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.402438,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0340729,
'L2/Runtime Dynamic': 0.00925351,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.77651,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.2772,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.4315,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0326979,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.163059,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.163059,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 6.89714,
'Load Store Unit/Runtime Dynamic': 3.39871,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.402075,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.80415,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.576917,
'Load Store Unit/Subthreshold Leakage with power gating': 0.277892,
'Memory Management Unit/Area': 0.401395,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.142698,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.143195,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00569508,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.338645,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0399744,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.550505,
'Memory Management Unit/Runtime Dynamic': 0.183169,
'Memory Management Unit/Subthreshold Leakage': 0.0622062,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0344318,
'Peak Dynamic': 21.854,
'Renaming Unit/Area': 0.371598,
'Renaming Unit/FP Front End RAT/Area': 0.284555,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00465468,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 4.02886,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.83368,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0482834,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0275216,
'Renaming Unit/Free List/Area': 0.017889,
'Renaming Unit/Free List/Gate Leakage': 7.78669e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0183078,
'Renaming Unit/Free List/Runtime Dynamic': 0.0461532,
'Renaming Unit/Free List/Subthreshold Leakage': 0.00152673,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000825539,
'Renaming Unit/Gate Leakage': 0.00738568,
'Renaming Unit/Int Front End RAT/Area': 0.0572832,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00177998,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731753,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.546968,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.0178369,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.010167,
'Renaming Unit/Peak Dynamic': 4.86244,
'Renaming Unit/Runtime Dynamic': 1.4268,
'Renaming Unit/Subthreshold Leakage': 0.0729075,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0404868,
'Runtime Dynamic': 8.38921,
'Subthreshold Leakage': 5.70206,
'Subthreshold Leakage with power gating': 2.37718}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 1.0024356260259295,
'Runtime Dynamic': 1.0024356260259295,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.0495652,
'Runtime Dynamic': 0.0235215,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 90.8141,
'Gate Leakage': 0.385182,
'Peak Dynamic': 21.9035,
'Peak Power': 34.8455,
'Runtime Dynamic': 8.41273,
'Subthreshold Leakage': 12.5568,
'Subthreshold Leakage with power gating': 5.87655,
'Total Cores/Area': 28.4239,
'Total Cores/Gate Leakage': 0.331603,
'Total Cores/Peak Dynamic': 21.854,
'Total Cores/Runtime Dynamic': 8.38921,
'Total Cores/Subthreshold Leakage': 5.70206,
'Total Cores/Subthreshold Leakage with power gating': 2.37718,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.0495652,
'Total L3s/Runtime Dynamic': 0.0235215,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 12.942,
'Total NoCs/Area': 0.482695,
'Total NoCs/Gate Leakage': 0.00516603,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0538708,
              'Total NoCs/Subthreshold Leakage with power gating': 0.0202016}}
| [
"[email protected]"
]
| |
205666d8aee922f8db00530c0d47974317fa9350 | 4210e060f9766e051147a7bc2fe9caf76b57dfda | /library/urls.py | b32fe166fb4219c7b8be1f695222cb8d663fb8ad | []
| no_license | eyobofficial/library | e27cc64d88013664cf27399bc3402d0f2b26fa90 | 87a1aaeb93db5511423be9c1fdb7b96c884d13f1 | refs/heads/master | 2020-12-02T06:19:00.159110 | 2017-07-30T08:02:30 | 2017-07-30T08:02:30 | 96,815,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 983 | py | """library URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.views.generic import RedirectView
from django.contrib import admin
urlpatterns = [
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^catalog/', include('catalog.urls')),
url(r'^$', RedirectView.as_view(url='/catalog/')),
url(r'^admin/', admin.site.urls),
]
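# The patterns above exercise the styles described in the docstring: include()
# pulls in the auth and catalog URLconfs, RedirectView.as_view() is a
# class-based view mapped at the root, and the admin site mounts its own URLs.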
| [
"[email protected]"
]
| |
3ee0b9cfa0a12433c2872fc03735b4afd463fdbb | d5ce517617f90aba1a618098f459b262968a6a20 | /flup/client/scgi_app.py | 33121806e77f5dd59af0fad0fb0829567829f91d | []
| no_license | jedie/flup-py3.3 | 8bfabe2195cfe5df1fb8acfb92a108b43d668e51 | 56d495311d0e850fbab94c6c3e160793e245d0d4 | refs/heads/master | 2021-01-18T06:51:27.659312 | 2014-05-22T01:35:10 | 2014-05-22T01:35:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,923 | py | # Copyright (c) 2006 Allan Saddi <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# $Id: scgi_app.py 2111 2006-11-25 02:00:21Z asaddi $
__author__ = 'Allan Saddi <[email protected]>'
__version__ = '$Revision: 2111 $'
import select
import struct
import socket
import errno
__all__ = ['SCGIApp']
def encodeNetstring(s):
return ''.join([str(len(s)), ':', s, ','])
class SCGIApp(object):
def __init__(self, connect=None, host=None, port=None,
filterEnviron=True):
if host is not None:
assert port is not None
connect=(host, port)
assert connect is not None
self._connect = connect
self._filterEnviron = filterEnviron
def __call__(self, environ, start_response):
sock = self._getConnection()
outfile = sock.makefile('w')
infile = sock.makefile('r')
sock.close()
# Filter WSGI environ and send as request headers
if self._filterEnviron:
headers = self._defaultFilterEnviron(environ)
else:
headers = self._lightFilterEnviron(environ)
# TODO: Anything not from environ that needs to be sent also?
content_length = int(environ.get('CONTENT_LENGTH') or 0)
if 'CONTENT_LENGTH' in headers:
del headers['CONTENT_LENGTH']
headers_out = ['CONTENT_LENGTH', str(content_length), 'SCGI', '1']
for k,v in list(headers.items()):
headers_out.append(k)
headers_out.append(v)
headers_out.append('') # For trailing NUL
outfile.write(encodeNetstring('\x00'.join(headers_out)))
# Transfer wsgi.input to outfile
while True:
chunk_size = min(content_length, 4096)
s = environ['wsgi.input'].read(chunk_size)
content_length -= len(s)
outfile.write(s)
if not s: break
outfile.close()
# Read result from SCGI server
result = []
while True:
buf = infile.read(4096)
if not buf: break
result.append(buf)
infile.close()
result = ''.join(result)
# Parse response headers
status = '200 OK'
headers = []
pos = 0
while True:
eolpos = result.find('\n', pos)
if eolpos < 0: break
line = result[pos:eolpos-1]
pos = eolpos + 1
# strip in case of CR. NB: This will also strip other
# whitespace...
line = line.strip()
# Empty line signifies end of headers
if not line: break
# TODO: Better error handling
header, value = line.split(':', 1)
header = header.strip().lower()
value = value.strip()
if header == 'status':
# Special handling of Status header
status = value
if status.find(' ') < 0:
# Append a dummy reason phrase if one was not provided
status += ' SCGIApp'
else:
headers.append((header, value))
result = result[pos:]
# Set WSGI status, headers, and return result.
start_response(status, headers)
return [result]
def _getConnection(self):
if type(self._connect) is str:
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(self._connect)
return sock
_environPrefixes = ['SERVER_', 'HTTP_', 'REQUEST_', 'REMOTE_', 'PATH_',
'CONTENT_']
_environCopies = ['SCRIPT_NAME', 'QUERY_STRING', 'AUTH_TYPE']
_environRenames = {}
def _defaultFilterEnviron(self, environ):
result = {}
for n in list(environ.keys()):
for p in self._environPrefixes:
if n.startswith(p):
result[n] = environ[n]
if n in self._environCopies:
result[n] = environ[n]
if n in self._environRenames:
result[self._environRenames[n]] = environ[n]
return result
def _lightFilterEnviron(self, environ):
result = {}
for n in list(environ.keys()):
if n.upper() == n:
result[n] = environ[n]
return result
if __name__ == '__main__':
from flup.server.ajp import WSGIServer
app = SCGIApp(connect=('localhost', 4000))
#import paste.lint
#app = paste.lint.middleware(app)
WSGIServer(app).run()
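# Netstring framing, as produced by encodeNetstring above: the SCGI request
# header block travels as "<len>:<payload>," so the server can delimit it
# without a sentinel byte. For example, encodeNetstring('hello') == '5:hello,'.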
| [
"[email protected]"
]
| |
275cda750cead39dc0c7730f98e3492cb24ed36e | f0987e17aea6668158cd334c1fbacfe6286d3c77 | /NITA/tests/unit/security/system/test_security_profile.py | 5372b77e295818e835cd59941c2a62b5a2d67c74 | []
| no_license | fengyun4623/file | 00bf21f952ea3f95ffc9fe18448b244b26b7fadb | 3966c63d48557b0b94303896eed7a767593a4832 | refs/heads/master | 2023-04-02T05:01:25.066052 | 2020-07-29T16:15:31 | 2020-07-29T16:15:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 100,997 | py | # coding: UTF-8
"""All unit test cases for system security-profile module"""
# pylint: disable=attribute-defined-outside-init,invalid-name
__author__ = ['Jon Jiang']
__contact__ = '[email protected]'
__copyright__ = 'Juniper Networks Inc.'
__date__ = '2017'
import re
from unittest import TestCase, mock
from jnpr.toby.hldcl import device as dev
from jnpr.toby.utils.flow_common_tool import flow_common_tool
from jnpr.toby.utils.xml_tool import xml_tool
from jnpr.toby.security.system.security_profile import security_profile
class TestSecurityProfile(TestCase):
"""Unitest cases for security profile module"""
def setUp(self):
"""setup before all cases"""
self.tool = flow_common_tool()
self.xml = xml_tool()
self.ins = security_profile()
self.mock_device_ins = mock.Mock()
self.response = {}
self.response["SA_HE_ADDRESS_BOOK"] = """
<security-profile-address-book-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>SP-root</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>4000</resources-maximum>
</security-profile-information>
</security-profile-address-book-information>
"""
self.response["SA_HE_ADDRESS_BOOK_TEXT"] = """
logical system name security profile name usage reserved maximum feature
root-logical-system SP-root 0 0 4000
"""
self.response["SA_HE_APPFW_PROFILE"] = """
<security-profile-appfw-profile-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="detail">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>SP-root</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>57344</resources-maximum>
</security-profile-information>
</security-profile-appfw-profile-information>
"""
self.response["SA_HE_APPFW_PROFILE_TEXT"] = """
logical system name security profile name usage reserved maximum feature
root-logical-system SP-root 0 0 57344
"""
self.response["SA_HE_APPFW_RULE"] = """
<security-profile-appfw-rule-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>SP-root</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>114688</resources-maximum>
</security-profile-information>
</security-profile-appfw-rule-information>
"""
self.response["SA_HE_APPFW_RULE_TEXT"] = """
logical system name security profile name usage reserved maximum feature
root-logical-system SP-root 0 0 114688
"""
self.response["SA_HE_APPFW_RULE_SET"] = """
<security-profile-appfw-rule-set-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>SP-root</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>57344</resources-maximum>
</security-profile-information>
</security-profile-appfw-rule-set-information>
"""
self.response["SA_HE_APPFW_RULE_SET_TEXT"] = """
logical system name security profile name usage reserved maximum feature
root-logical-system SP-root 0 0 57344
"""
self.response["SA_HE_FLOW_GATE"] = """
<security-profile-flow-gate-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>SP-root</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>524288</resources-maximum>
</security-profile-information>
</security-profile-flow-gate-information>
"""
self.response["SA_HE_FLOW_GATE_TEXT"] = """
logical system name security profile name usage reserved maximum feature
root-logical-system SP-root 0 0 524288
"""
self.response["SA_HE_FLOW_SESSION"] = """
<security-profile-flow-session-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>SP-root</security-profile-name>
<resources-used>4</resources-used>
<resources-reserved>25000</resources-reserved>
<resources-maximum>50000</resources-maximum>
</security-profile-information>
</security-profile-flow-session-information>
"""
self.response["SA_HE_FLOW_SESSION_TEXT"] = """
logical system name security profile name usage reserved maximum feature
root-logical-system SP-root 4 25000 50000
"""
self.response["SA_HE_AUTH_ENTRY"] = """
<security-profile-auth-entry-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>SP-root</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>50000</resources-maximum>
</security-profile-information>
</security-profile-auth-entry-information>
"""
self.response["SA_HE_AUTH_ENTRY_TEXT"] = """
logical system name security profile name usage reserved maximum feature
root-logical-system SP-root 0 0 50000
"""
self.response["SA_HE_DSLITE_SOFTWIRE_INITIATOR"] = """
<security-profile-dslite-softwire-initiator-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>SP-root</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>100000</resources-maximum>
</security-profile-information>
</security-profile-dslite-softwire-initiator-information>
"""
self.response["SA_HE_DSLITE_SOFTWIRE_INITIATOR_TEXT"] = """
logical system name security profile name usage reserved maximum feature
root-logical-system SP-root 0 0 100000
"""
self.response["SA_HE_POLICY"] = """
<security-profile-policy-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>SP-root</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>100</resources-reserved>
<resources-maximum>200</resources-maximum>
</security-profile-information>
</security-profile-policy-information>
"""
self.response["SA_HE_POLICY_TEXT"] = """
logical system name security profile name usage reserved maximum feature
root-logical-system SP-root 0 100 200
"""
self.response["SA_HE_POLICY_WITH_COUNT"] = """
<security-profile-policy-with-count-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>SP-root</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>1024</resources-maximum>
</security-profile-information>
</security-profile-policy-with-count-information>
"""
self.response["SA_HE_POLICY_WITH_COUNT_TEXT"] = """
logical system name security profile name usage reserved maximum feature
root-logical-system SP-root 0 0 1024
"""
self.response["SA_HE_SCHEDULER"] = """
<security-profile-scheduler-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>SP-root</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>256</resources-maximum>
</security-profile-information>
</security-profile-scheduler-information>
"""
self.response["SA_HE_SCHEDULER_TEXT"] = """
logical system name security profile name usage reserved maximum feature
root-logical-system SP-root 0 0 256
"""
self.response["SA_HE_ZONE"] = """
<security-profile-zone-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>SP-root</security-profile-name>
<resources-used>3</resources-used>
<resources-reserved>50</resources-reserved>
<resources-maximum>60</resources-maximum>
</security-profile-information>
</security-profile-zone-information>
"""
self.response["SA_HE_ZONE_TEXT"] = """
logical system name security profile name usage reserved maximum feature
root-logical-system SP-root 3 5 60
"""
self.response["HA_HE_NAT_CONE_BINDING"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-cone-binding-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/15.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>2097152</resources-maximum>
</security-profile-information>
</security-profile-nat-cone-binding-information>
</multi-routing-engine-item>
<multi-routing-engine-item>
<re-name>node1</re-name>
<security-profile-nat-cone-binding-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/15.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>2097152</resources-maximum>
</security-profile-information>
</security-profile-nat-cone-binding-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["SA_LE_NAT_CONE_BINDING"] = """
<security-profile-nat-cone-binding-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1D0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>0</resources-maximum>
</security-profile-information>
</security-profile-nat-cone-binding-information>
"""
self.response["SA_HE_NAT_CONE_BINDING_MULTI_LSYS"] = """
<security-profile-nat-cone-binding-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/17.4I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>2097152</resources-maximum>
</security-profile-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/17.4I0/junos-securityprofile" junos:style="terse">
<logical-system-name>LSYS1</logical-system-name>
<security-profile-name>SP1</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>2097152</resources-maximum>
</security-profile-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/17.4I0/junos-securityprofile" junos:style="terse">
<logical-system-name>LSYS2</logical-system-name>
<security-profile-name>SP2</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>2097152</resources-maximum>
</security-profile-information>
</security-profile-nat-cone-binding-information>
"""
self.response["HA_HE_NAT_CONE_BINDING_TEXT"] = """
node0:
--------------------------------------------------------------------------
logical system name security profile name usage reserved maximum
root-logical-system Default-Profile 0 0 2097152
"""
self.response["SA_HE_NAT_DESTINATION_POOL"] = """
<security-profile-nat-destination-pool-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>8192</resources-maximum>
</security-profile-information>
</security-profile-nat-destination-pool-information>
"""
self.response["HA_HE_NAT_DESTINATION_POOL"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-destination-pool-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.2I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>8192</resources-maximum>
</security-profile-information>
</security-profile-nat-destination-pool-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_DESTINATION_POOL_TEXT"] = """
node0:
--------------------------------------------------------------------------
logical system name security profile name usage reserved maximum
root-logical-system Default-Profile 0 0 8192
"""
self.response["HA_HE_NAT_DESTINATION_POOL_SUMMARY"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-destination-pool-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="summary">
<resources-used>0</resources-used>
<resources-maximum>8192</resources-maximum>
<resources-available>8192</resources-available>
<total-logical-systems>1</total-logical-systems>
<total-profiles>1</total-profiles>
<heaviest-usage>0</heaviest-usage>
<heaviest-user>root-logical-system</heaviest-user>
<lightest-usage>0</lightest-usage>
<lightest-user>root-logical-system</lightest-user>
</security-profile-information>
</security-profile-nat-destination-pool-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_DESTINATION_RULE_SUMMARY"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-destination-rule-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="summary">
<resources-used>0</resources-used>
<resources-maximum>8192</resources-maximum>
<resources-available>8192</resources-available>
<total-logical-systems>1</total-logical-systems>
<total-profiles>1</total-profiles>
<heaviest-usage>0</heaviest-usage>
<heaviest-user>root-logical-system</heaviest-user>
<lightest-usage>0</lightest-usage>
<lightest-user>root-logical-system</lightest-user>
</security-profile-information>
</security-profile-nat-destination-rule-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_DESTINATION_RULE_TEXT"] = """
node0:
--------------------------------------------------------------------------
logical system name security profile name usage reserved maximum feature
root-logical-system Default-Profile 0 0 8192
"""
self.response["HA_HE_NAT_INTERFACE_PORT_OL"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-interface-po-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>64</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>32</resources-maximum>
</security-profile-information>
</security-profile-nat-interface-po-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_INTERFACE_PORT_OL_SUMMARY"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-interface-po-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="summary">
<resources-used>64</resources-used>
<resources-maximum>128</resources-maximum>
<resources-available>64</resources-available>
<total-logical-systems>1</total-logical-systems>
<total-profiles>1</total-profiles>
<heaviest-usage>64</heaviest-usage>
<heaviest-user>root-logical-system</heaviest-user>
<lightest-usage>64</lightest-usage>
<lightest-user>root-logical-system</lightest-user>
</security-profile-information>
</security-profile-nat-interface-po-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_INTERFACE_PORT_OL_SUMMARY_TEXT"] = """
node0:
--------------------------------------------------------------------------
global used amount : 64
global maximum quota : 128
global available amount : 64
total logical systems : 1
total security profiles : 1
heaviest usage / user : 64 / root-logical-system
lightest usage / user : 64 / root-logical-system
"""
self.response["HA_HE_NAT_NOPAT_ADDRESS"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-nopat-address-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>1048576</resources-maximum>
</security-profile-information>
</security-profile-nat-nopat-address-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_NOPAT_ADDRESS_SUMMARY"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-nopat-address-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="summary">
<resources-used>0</resources-used>
<resources-maximum>1048576</resources-maximum>
<resources-available>1048576</resources-available>
<total-logical-systems>1</total-logical-systems>
<total-profiles>1</total-profiles>
<heaviest-usage>0</heaviest-usage>
<heaviest-user>root-logical-system</heaviest-user>
<lightest-usage>0</lightest-usage>
<lightest-user>root-logical-system</lightest-user>
</security-profile-information>
</security-profile-nat-nopat-address-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_NOPAT_ADDRESS_TEXT"] = """
node0:
--------------------------------------------------------------------------
logical system name security profile name usage reserved maximum feature
root-logical-system Default-Profile 0 0 1048576
"""
self.response["HA_HE_NAT_PAT_ADDRESS"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-pat-address-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>8192</resources-maximum>
</security-profile-information>
</security-profile-nat-pat-address-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_PAT_ADDRESS_SUMMARY"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-pat-address-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="summary">
<resources-used>0</resources-used>
<resources-maximum>8192</resources-maximum>
<resources-available>8192</resources-available>
<total-logical-systems>1</total-logical-systems>
<total-profiles>1</total-profiles>
<heaviest-usage>0</heaviest-usage>
<heaviest-user>root-logical-system</heaviest-user>
<lightest-usage>0</lightest-usage>
<lightest-user>root-logical-system</lightest-user>
</security-profile-information>
</security-profile-nat-pat-address-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_PAT_ADDRESS_TEXT"] = """
node0:
--------------------------------------------------------------------------
logical system name security profile name usage reserved maximum feature
root-logical-system Default-Profile 0 0 8192
"""
self.response["HA_HE_NAT_PORT_OL_IPNUMBER"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-port-ol-ipnumber-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>2</resources-maximum>
</security-profile-information>
</security-profile-nat-port-ol-ipnumber-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_PORT_OL_IPNUMBER_SUMMARY"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-port-ol-ipnumber-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="summary">
<resources-used>0</resources-used>
<resources-maximum>2</resources-maximum>
<resources-available>2</resources-available>
<total-logical-systems>1</total-logical-systems>
<total-profiles>1</total-profiles>
<heaviest-usage>0</heaviest-usage>
<heaviest-user>root-logical-system</heaviest-user>
<lightest-usage>0</lightest-usage>
<lightest-user>root-logical-system</lightest-user>
</security-profile-information>
</security-profile-nat-port-ol-ipnumber-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_PORT_OL_IPNUMBER_TEXT"] = """
node0:
--------------------------------------------------------------------------
global used amount : 0
global maximum quota : 2
global available amount : 2
total logical systems : 1
total security profiles : 1
heaviest usage / user : 0 / root-logical-system
lightest usage / user : 0 / root-logical-system
"""
self.response["HA_HE_NAT_RULE_REFERENCED_PREFIX"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-rule-referenced-prefix-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>1048576</resources-maximum>
</security-profile-information>
</security-profile-nat-rule-referenced-prefix-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_RULE_REFERENCED_PREFIX_SUMMARY"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-rule-referenced-prefix-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="summary">
<resources-used>0</resources-used>
<resources-maximum>1048576</resources-maximum>
<resources-available>1048576</resources-available>
<total-logical-systems>1</total-logical-systems>
<total-profiles>1</total-profiles>
<heaviest-usage>0</heaviest-usage>
<heaviest-user>root-logical-system</heaviest-user>
<lightest-usage>0</lightest-usage>
<lightest-user>root-logical-system</lightest-user>
</security-profile-information>
</security-profile-nat-rule-referenced-prefix-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_RULE_REFERENCED_PREFIX_TEXT"] = """
node0:
--------------------------------------------------------------------------
logical system name security profile name usage reserved maximum feature
root-logical-system Default-Profile 0 0 1048576
"""
self.response["HA_HE_NAT_SOURCE_POOL"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-source-pool-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>8192</resources-maximum>
</security-profile-information>
</security-profile-nat-source-pool-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_SOURCE_POOL_SUMMARY"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-source-pool-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="summary">
<resources-used>0</resources-used>
<resources-maximum>8192</resources-maximum>
<resources-available>8192</resources-available>
<total-logical-systems>1</total-logical-systems>
<total-profiles>1</total-profiles>
<heaviest-usage>0</heaviest-usage>
<heaviest-user>root-logical-system</heaviest-user>
<lightest-usage>0</lightest-usage>
<lightest-user>root-logical-system</lightest-user>
</security-profile-information>
</security-profile-nat-source-pool-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_SOURCE_POOL_TEXT"] = """
node0:
--------------------------------------------------------------------------
logical system name security profile name usage reserved maximum feature
root-logical-system Default-Profile 0 0 8192
"""
self.response["HA_HE_NAT_SOURCE_RULE"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-source-rule-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>8192</resources-maximum>
</security-profile-information>
</security-profile-nat-source-rule-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_SOURCE_RULE_SUMMARY"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-source-rule-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="summary">
<resources-used>0</resources-used>
<resources-maximum>8192</resources-maximum>
<resources-available>8192</resources-available>
<total-logical-systems>1</total-logical-systems>
<total-profiles>1</total-profiles>
<heaviest-usage>0</heaviest-usage>
<heaviest-user>root-logical-system</heaviest-user>
<lightest-usage>0</lightest-usage>
<lightest-user>root-logical-system</lightest-user>
</security-profile-information>
</security-profile-nat-source-rule-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_SOURCE_RULE_TEXT"] = """
node0:
--------------------------------------------------------------------------
logical system name security profile name usage reserved maximum feature
root-logical-system Default-Profile 0 0 8192
"""
self.response["HA_HE_NAT_STATIC_RULE"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-static-rule-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>8192</resources-maximum>
</security-profile-information>
</security-profile-nat-static-rule-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_STATIC_RULE_SUMMARY"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-static-rule-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.1I0/junos-securityprofile" junos:style="summary">
<resources-used>0</resources-used>
<resources-maximum>8192</resources-maximum>
<resources-available>8192</resources-available>
<total-logical-systems>1</total-logical-systems>
<total-profiles>1</total-profiles>
<heaviest-usage>0</heaviest-usage>
<heaviest-user>root-logical-system</heaviest-user>
<lightest-usage>0</lightest-usage>
<lightest-user>root-logical-system</lightest-user>
</security-profile-information>
</security-profile-nat-static-rule-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_HE_NAT_STATIC_RULE_TEXT"] = """
node0:
--------------------------------------------------------------------------
logical system name security profile name usage reserved maximum feature
root-logical-system Default-Profile 0 0 8192
"""
self.response["HA_LE_NAT_PAT_PORTNUM"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-pat-portnum-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.2I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>201326592</resources-maximum>
</security-profile-information>
</security-profile-nat-pat-portnum-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_LE_NAT_PAT_PORTNUM_SUMMARY"] = """
<multi-routing-engine-results>
<multi-routing-engine-item>
<re-name>node0</re-name>
<security-profile-nat-pat-portnum-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.2I0/junos-securityprofile" junos:style="summary">
<resources-used>0</resources-used>
<resources-maximum>201326592</resources-maximum>
<resources-available>201326592</resources-available>
<total-logical-systems>1</total-logical-systems>
<total-profiles>1</total-profiles>
<heaviest-usage>0</heaviest-usage>
<heaviest-user>root-logical-system</heaviest-user>
<lightest-usage>0</lightest-usage>
<lightest-user>root-logical-system</lightest-user>
</security-profile-information>
</security-profile-nat-pat-portnum-information>
</multi-routing-engine-item>
</multi-routing-engine-results>
"""
self.response["HA_LE_NAT_PAT_PORTNUM_TEXT"] = """
node0:
--------------------------------------------------------------------------
logical system name security profile name usage reserved maximum
root-logical-system Default-Profile 0 0 201326592
"""
self.response["SA_LE_LOG_STREAM_NUMBER"] = """
<security-profile-security-log-stream-num-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.2I0/junos-securityprofile" junos:style="terse">
<logical-system-name>root-logical-system</logical-system-name>
<security-profile-name>Default-Profile</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>3</resources-maximum>
</security-profile-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.2I0/junos-securityprofile" junos:style="terse">
<logical-system-name>LSYS1</logical-system-name>
<security-profile-name>null</security-profile-name>
<resources-used>0</resources-used>
<resources-reserved>0</resources-reserved>
<resources-maximum>0</resources-maximum>
</security-profile-information>
</security-profile-security-log-stream-num-information>
"""
self.response["SA_LE_LOG_STREAM_NUMBER_SUMMARY"] = """
<security-profile-security-log-stream-num-information>
<security-profile-information xmlns="http://xml.juniper.net/junos/18.2I0/junos-securityprofile" junos:style="summary">
<resources-used>0</resources-used>
<resources-maximum>32</resources-maximum>
<resources-available>32</resources-available>
<total-logical-systems>2</total-logical-systems>
<total-profiles>0</total-profiles>
<heaviest-usage>0</heaviest-usage>
<heaviest-user>root-logical-system ...(2 logical systems)</heaviest-user>
<lightest-usage>0</lightest-usage>
<lightest-user>root-logical-system ...(2 logical systems)</lightest-user>
</security-profile-information>
</security-profile-security-log-stream-num-information>
"""
self.response["SA_LE_LOG_STREAM_NUMBER_TEXT"] = """
global used amount : 0
global maximum quota : 32
global available amount : 32
total logical systems : 2
total security profiles : 0
heaviest usage / user : 0 / root-logical-system ...(2 logical systems)
lightest usage / user : 0 / root-logical-system ...(2 logical systems)
"""
def tearDown(self):
"""teardown after all case"""
pass
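# Common pattern for every test below: dev.execute_cli_command_on_device is
# patched so no live device is touched; XML fixtures are converted to dicts
# via self.xml.xml_string_to_dict to emulate a structured RPC reply, while
# the *_TEXT fixtures are returned verbatim to exercise return_mode="text".
# For reference, a terse entry parses into a dict shaped roughly like the
# sketch below (keys inferred from the assertions; a sketch, not the exact
# parser output):
#
#     {
#         "re_name": "node0",  # present only for HA replies
#         "logical_system_name": "root-logical-system",
#         "security_profile_name": "Default-Profile",
#         "resources_used": "0",
#         "resources_reserved": "0",
#         "resources_maximum": "8192",
#     }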
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_address_book(self, mock_execute_cli_command_on_device):
"""checking get address book"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_ADDRESS_BOOK"])
response = self.ins.get_address_book(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "SP-root")
self.assertEqual(response[0]["resources_maximum"], "4000")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_HE_ADDRESS_BOOK_TEXT"]
response = self.ins.get_address_book(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_appfw_profile(self, mock_execute_cli_command_on_device):
"""checking get appfw profile"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_APPFW_PROFILE"])
response = self.ins.get_appfw_profile(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "SP-root")
self.assertEqual(response[0]["resources_maximum"], "57344")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_HE_APPFW_PROFILE_TEXT"]
response = self.ins.get_appfw_profile(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_appfw_rule(self, mock_execute_cli_command_on_device):
"""checking get appfw rule"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_APPFW_RULE"])
response = self.ins.get_appfw_rule(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "SP-root")
self.assertEqual(response[0]["resources_maximum"], "114688")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_HE_APPFW_RULE_TEXT"]
response = self.ins.get_appfw_rule(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_appfw_rule_set(self, mock_execute_cli_command_on_device):
"""checking get appfw rule set"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_APPFW_RULE_SET"])
response = self.ins.get_appfw_rule_set(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "SP-root")
self.assertEqual(response[0]["resources_maximum"], "57344")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_HE_APPFW_RULE_SET_TEXT"]
response = self.ins.get_appfw_rule_set(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_auth_entry(self, mock_execute_cli_command_on_device):
"""checking get auth entry"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_AUTH_ENTRY"])
response = self.ins.get_auth_entry(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "SP-root")
self.assertEqual(response[0]["resources_maximum"], "50000")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_HE_AUTH_ENTRY_TEXT"]
response = self.ins.get_auth_entry(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_dslite_softwire_initiator(self, mock_execute_cli_command_on_device):
"""checking get dslite softwire initiator"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_DSLITE_SOFTWIRE_INITIATOR"])
response = self.ins.get_dslite_softwire_initiator(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "SP-root")
self.assertEqual(response[0]["resources_maximum"], "100000")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_HE_DSLITE_SOFTWIRE_INITIATOR_TEXT"]
response = self.ins.get_dslite_softwire_initiator(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_flow_gate(self, mock_execute_cli_command_on_device):
"""checking get flow gate"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_FLOW_GATE"])
response = self.ins.get_flow_gate(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "SP-root")
self.assertEqual(response[0]["resources_maximum"], "524288")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_HE_FLOW_GATE_TEXT"]
response = self.ins.get_flow_gate(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_flow_session(self, mock_execute_cli_command_on_device):
"""checking get flow session"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_FLOW_SESSION"])
response = self.ins.get_flow_session(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "SP-root")
self.assertEqual(response[0]["resources_maximum"], "50000")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_HE_FLOW_SESSION_TEXT"]
response = self.ins.get_flow_session(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_nat_cone_binding(self, mock_execute_cli_command_on_device):
"""checking get nat cone binding"""
print("SA LE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_LE_NAT_CONE_BINDING"])
response = self.ins.get_nat_cone_binding(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "Default-Profile")
print("HA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_CONE_BINDING"])
response = self.ins.get_nat_cone_binding(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(len(response), 2)
self.assertIn("re_name", response[0])
self.assertIn("re_name", response[1])
print("SA HE setup with multiple LSYS entities")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_NAT_CONE_BINDING_MULTI_LSYS"])
response = self.ins.get_nat_cone_binding(device=self.mock_device_ins)
self.assertIsInstance(response, list)
self.assertEqual(len(response), 3)
for item in response:
self.assertIn("resources_maximum", item)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[1]["logical_system_name"], "LSYS1")
self.assertEqual(response[2]["logical_system_name"], "LSYS2")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["HA_HE_NAT_CONE_BINDING_TEXT"]
response = self.ins.get_nat_cone_binding(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_nat_destination_pool(self, mock_execute_cli_command_on_device):
"""checking get nat destination pool"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_NAT_DESTINATION_POOL"])
response = self.ins.get_nat_destination_pool(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "Default-Profile")
print("HA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_DESTINATION_POOL"])
response = self.ins.get_nat_destination_pool(device=self.mock_device_ins)
self.assertIsInstance(response, list)
self.assertEqual(len(response), 1)
self.assertIn("re_name", response[0])
self.assertEqual(response[0]["resources_maximum"], "8192")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["HA_HE_NAT_DESTINATION_POOL_TEXT"]
response = self.ins.get_nat_destination_pool(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_nat_destination_rule(self, mock_execute_cli_command_on_device):
"""checking get nat destination rule"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_DESTINATION_RULE_SUMMARY"])
response = self.ins.get_nat_destination_rule(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(response[0]["heaviest_user"], "root-logical-system")
self.assertEqual(response[0]["resources_available"], "8192")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["HA_HE_NAT_DESTINATION_RULE_TEXT"]
response = self.ins.get_nat_destination_rule(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_nat_interface_port_ol(self, mock_execute_cli_command_on_device):
"""checking get nat interface port ol"""
print("HA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_INTERFACE_PORT_OL"])
response = self.ins.get_nat_interface_port_ol(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "Default-Profile")
print("HA HE setup with SUMMARY response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_INTERFACE_PORT_OL_SUMMARY"])
response = self.ins.get_nat_interface_port_ol(device=self.mock_device_ins)
self.assertIsInstance(response, list)
self.assertEqual(len(response), 1)
self.assertIn("re_name", response[0])
self.assertEqual(response[0]["resources_used"], "64")
self.assertEqual(response[0]["resources_maximum"], "128")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["HA_HE_NAT_INTERFACE_PORT_OL_SUMMARY_TEXT"]
response = self.ins.get_nat_interface_port_ol(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_nat_nopat_address(self, mock_execute_cli_command_on_device):
"""checking get nat nopat address"""
print("HA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_NOPAT_ADDRESS"])
response = self.ins.get_nat_nopat_address(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "Default-Profile")
self.assertEqual(response[0]["resources_maximum"], "1048576")
print("HA HE setup with SUMMARY response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_NOPAT_ADDRESS_SUMMARY"])
response = self.ins.get_nat_nopat_address(device=self.mock_device_ins)
self.assertIsInstance(response, list)
self.assertEqual(len(response), 1)
self.assertIn("re_name", response[0])
self.assertEqual(response[0]["resources_used"], "0")
self.assertEqual(response[0]["resources_available"], "1048576")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["HA_HE_NAT_NOPAT_ADDRESS_TEXT"]
response = self.ins.get_nat_nopat_address(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_nat_pat_address(self, mock_execute_cli_command_on_device):
"""checking get nat pat address"""
print("HA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_PAT_ADDRESS"])
response = self.ins.get_nat_pat_address(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "Default-Profile")
self.assertEqual(response[0]["resources_maximum"], "8192")
print("HA HE setup with SUMMARY response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_PAT_ADDRESS_SUMMARY"])
response = self.ins.get_nat_pat_address(device=self.mock_device_ins)
self.assertIsInstance(response, list)
self.assertEqual(len(response), 1)
self.assertIn("re_name", response[0])
self.assertEqual(response[0]["resources_used"], "0")
self.assertEqual(response[0]["resources_available"], "8192")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["HA_HE_NAT_PAT_ADDRESS_TEXT"]
response = self.ins.get_nat_pat_address(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_nat_pat_portnum(self, mock_execute_cli_command_on_device):
"""checking get nat pat portn"""
print("HA LE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_LE_NAT_PAT_PORTNUM"])
response = self.ins.get_nat_pat_portnum(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "Default-Profile")
self.assertEqual(response[0]["resources_maximum"], "201326592")
print("HA HE setup with SUMMARY response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_LE_NAT_PAT_PORTNUM_SUMMARY"])
response = self.ins.get_nat_pat_portnum(device=self.mock_device_ins)
self.assertIsInstance(response, list)
self.assertEqual(len(response), 1)
self.assertIn("re_name", response[0])
self.assertEqual(response[0]["resources_used"], "0")
self.assertEqual(response[0]["resources_available"], "201326592")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["HA_LE_NAT_PAT_PORTNUM_TEXT"]
response = self.ins.get_nat_pat_portnum(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_nat_port_ol_ipnumber(self, mock_execute_cli_command_on_device):
"""checking get nat port ol ipnumber"""
print("HA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_PORT_OL_IPNUMBER"])
response = self.ins.get_nat_port_ol_ipnumber(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "Default-Profile")
self.assertEqual(response[0]["resources_maximum"], "2")
print("HA HE setup with SUMMARY response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_PORT_OL_IPNUMBER_SUMMARY"])
response = self.ins.get_nat_port_ol_ipnumber(device=self.mock_device_ins)
self.assertIsInstance(response, list)
self.assertEqual(len(response), 1)
self.assertIn("re_name", response[0])
self.assertEqual(response[0]["resources_used"], "0")
self.assertEqual(response[0]["resources_available"], "2")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["HA_HE_NAT_PORT_OL_IPNUMBER_TEXT"]
response = self.ins.get_nat_port_ol_ipnumber(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_nat_rule_referenced_prefix(self, mock_execute_cli_command_on_device):
"""checking get nat port ol ipnumber"""
print("HA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_RULE_REFERENCED_PREFIX"])
response = self.ins.get_nat_rule_referenced_prefix(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "Default-Profile")
self.assertEqual(response[0]["resources_maximum"], "1048576")
print("HA HE setup with SUMMARY response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_RULE_REFERENCED_PREFIX_SUMMARY"])
response = self.ins.get_nat_rule_referenced_prefix(device=self.mock_device_ins)
self.assertIsInstance(response, list)
self.assertEqual(len(response), 1)
self.assertIn("re_name", response[0])
self.assertEqual(response[0]["resources_used"], "0")
self.assertEqual(response[0]["resources_available"], "1048576")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["HA_HE_NAT_RULE_REFERENCED_PREFIX_TEXT"]
response = self.ins.get_nat_rule_referenced_prefix(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_nat_source_pool(self, mock_execute_cli_command_on_device):
"""checking get nat port ol ipnumber"""
print("HA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_SOURCE_POOL"])
response = self.ins.get_nat_source_pool(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "Default-Profile")
self.assertEqual(response[0]["resources_maximum"], "8192")
print("HA HE setup with SUMMARY response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_SOURCE_POOL_SUMMARY"])
response = self.ins.get_nat_source_pool(device=self.mock_device_ins)
self.assertIsInstance(response, list)
self.assertEqual(len(response), 1)
self.assertIn("re_name", response[0])
self.assertEqual(response[0]["resources_used"], "0")
self.assertEqual(response[0]["resources_available"], "8192")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["HA_HE_NAT_SOURCE_POOL_TEXT"]
response = self.ins.get_nat_source_pool(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_nat_source_rule(self, mock_execute_cli_command_on_device):
"""checking get nat port ol ipnumber"""
print("HA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_SOURCE_RULE"])
response = self.ins.get_nat_source_rule(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "Default-Profile")
self.assertEqual(response[0]["resources_maximum"], "8192")
print("HA HE setup with SUMMARY response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_SOURCE_RULE_SUMMARY"])
response = self.ins.get_nat_source_rule(device=self.mock_device_ins)
self.assertIsInstance(response, list)
self.assertEqual(len(response), 1)
self.assertIn("re_name", response[0])
self.assertEqual(response[0]["resources_used"], "0")
self.assertEqual(response[0]["resources_available"], "8192")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["HA_HE_NAT_SOURCE_RULE_TEXT"]
response = self.ins.get_nat_source_rule(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_nat_static_rule(self, mock_execute_cli_command_on_device):
"""checking get nat port ol ipnumber"""
print("HA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_STATIC_RULE"])
response = self.ins.get_nat_static_rule(device=self.mock_device_ins, more_options="logical-system LSYS1", timeout=30)
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "Default-Profile")
self.assertEqual(response[0]["resources_maximum"], "8192")
print("HA HE setup with SUMMARY response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_STATIC_RULE_SUMMARY"])
response = self.ins.get_nat_static_rule(device=self.mock_device_ins)
self.assertIsInstance(response, list)
self.assertEqual(len(response), 1)
self.assertIn("re_name", response[0])
self.assertEqual(response[0]["resources_used"], "0")
self.assertEqual(response[0]["resources_available"], "8192")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["HA_HE_NAT_STATIC_RULE_TEXT"]
response = self.ins.get_nat_static_rule(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_policy(self, mock_execute_cli_command_on_device):
"""checking get policy"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_POLICY"])
response = self.ins.get_policy(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "SP-root")
self.assertEqual(response[0]["resources_maximum"], "200")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_HE_POLICY_TEXT"]
response = self.ins.get_policy(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_policy_with_count(self, mock_execute_cli_command_on_device):
"""checking get policy with count"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_POLICY_WITH_COUNT"])
response = self.ins.get_policy_with_count(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "SP-root")
self.assertEqual(response[0]["resources_maximum"], "1024")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_HE_POLICY_WITH_COUNT_TEXT"]
response = self.ins.get_policy_with_count(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_scheduler(self, mock_execute_cli_command_on_device):
"""checking get scheduler"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_SCHEDULER"])
response = self.ins.get_scheduler(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "SP-root")
self.assertEqual(response[0]["resources_maximum"], "256")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_HE_SCHEDULER_TEXT"]
response = self.ins.get_scheduler(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_security_log_stream_number(self, mock_execute_cli_command_on_device):
"""checking get security log stream number"""
print("SA LE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_LE_LOG_STREAM_NUMBER"])
response = self.ins.get_security_log_stream_number(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[1]["logical_system_name"], "LSYS1")
self.assertEqual(response[1]["security_profile_name"], "null")
self.assertEqual(response[1]["resources_maximum"], "0")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_LE_LOG_STREAM_NUMBER_TEXT"]
response = self.ins.get_security_log_stream_number(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_get_zone(self, mock_execute_cli_command_on_device):
"""checking get zone"""
print("SA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_ZONE"])
response = self.ins.get_zone(device=self.mock_device_ins, more_options="summary")
self.assertIsInstance(response, list)
self.assertEqual(response[0]["logical_system_name"], "root-logical-system")
self.assertEqual(response[0]["security_profile_name"], "SP-root")
self.assertEqual(response[0]["resources_maximum"], "60")
print("TEXT response")
mock_execute_cli_command_on_device.return_value = self.response["SA_HE_ZONE_TEXT"]
response = self.ins.get_zone(device=self.mock_device_ins, return_mode="text")
self.assertIsInstance(response, str)
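# The search_* tests below drive a small match-condition syntax that these
# keyword arguments appear to share: a bare value means exact match,
# "<value> eq" forces numeric equality, "<min>-<max> in" (or the equivalent
# tuple form such as ("0-10", "in")) checks a numeric range, "<substr> in"
# on string fields does a substring match, and return_mode="counter" makes
# the search return the number of matching entries instead of a boolean.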
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_address_book(self, mock_execute_cli_command_on_device):
"""checking search address book"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_ADDRESS_BOOK"])
response = self.ins.search_address_book(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=0,
resources_reserved=0,
resources_maximum="4000 eq",
security_profile_name="SP in",
)
self.assertTrue(response)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_appfw_profile(self, mock_execute_cli_command_on_device):
"""checking search appfw profile"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_APPFW_PROFILE"])
response = self.ins.search_appfw_profile(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=0,
resources_reserved=0,
resources_maximum="0-57344 in",
security_profile_name="SP in",
)
self.assertTrue(response)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_appfw_rule(self, mock_execute_cli_command_on_device):
"""checking search appfw rule"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_APPFW_RULE"])
response = self.ins.search_appfw_rule(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=0,
resources_reserved=0,
resources_maximum="114688",
security_profile_name="SP in",
)
self.assertTrue(response)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_appfw_rule_set(self, mock_execute_cli_command_on_device):
"""checking search appfw rule set"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_APPFW_RULE_SET"])
response = self.ins.search_appfw_rule_set(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=0,
resources_reserved=0,
resources_maximum="0-57344 in",
security_profile_name="SP in",
)
self.assertTrue(response)
print("HA HE setup not match response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_APPFW_RULE_SET"])
response = self.ins.search_appfw_rule_set(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=0,
resources_reserved=0,
resources_maximum="0",
security_profile_name="SP in",
)
self.assertFalse(response)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_auth_entry(self, mock_execute_cli_command_on_device):
"""checking search auth entry"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_AUTH_ENTRY"])
response = self.ins.search_auth_entry(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=0,
resources_reserved=0,
resources_maximum="50000 eq",
security_profile_name="SP in",
)
self.assertTrue(response)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_dslite_softwire_initiator(self, mock_execute_cli_command_on_device):
"""checking search auth entry"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_DSLITE_SOFTWIRE_INITIATOR"])
response = self.ins.search_dslite_softwire_initiator(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=0,
resources_reserved=0,
resources_maximum="100000 eq",
security_profile_name="SP in",
)
self.assertTrue(response)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_flow_gate(self, mock_execute_cli_command_on_device):
"""checking search flow gate"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_FLOW_GATE"])
response = self.ins.search_flow_gate(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=0,
resources_reserved=0,
resources_maximum="524288 eq",
security_profile_name="SP in",
)
self.assertTrue(response)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_flow_session(self, mock_execute_cli_command_on_device):
"""checking search flow session"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_FLOW_SESSION"])
response = self.ins.search_flow_session(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=4,
resources_reserved=25000,
resources_maximum="50000 eq",
security_profile_name="SP in",
)
self.assertTrue(response)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_policy(self, mock_execute_cli_command_on_device):
"""checking search flow policy"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_POLICY"])
response = self.ins.search_policy(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=0,
resources_reserved=100,
resources_maximum="200 eq",
security_profile_name="SP in",
)
self.assertTrue(response)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_policy_with_count(self, mock_execute_cli_command_on_device):
"""checking search flow policy with count"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_POLICY_WITH_COUNT"])
response = self.ins.search_policy_with_count(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=0,
resources_reserved=0,
resources_maximum="1024 eq",
security_profile_name="SP in",
)
self.assertTrue(response)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_scheduler(self, mock_execute_cli_command_on_device):
"""checking search flow scheduler"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_SCHEDULER"])
response = self.ins.search_scheduler(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=0,
resources_reserved=0,
resources_maximum="256 eq",
security_profile_name="SP in",
)
self.assertTrue(response)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_zone(self, mock_execute_cli_command_on_device):
"""checking search flow zone"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_ZONE"])
response = self.ins.search_zone(
device=self.mock_device_ins,
logical_system_name=["root", "in"],
resources_used=3,
resources_reserved=50,
resources_maximum="60 eq",
security_profile_name="SP in",
)
self.assertTrue(response)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_nat_cone_binding(self, mock_execute_cli_command_on_device):
"""checking search nat cone binding"""
print("SA LE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_LE_NAT_CONE_BINDING"])
response = self.ins.search_nat_cone_binding(
device=self.mock_device_ins,
logical_system_name="root in",
security_profile_name="Default-Profile",
resources_used="0-10 in",
resources_reserved=("0-10", "in"),
resources_maximum=0,
)
self.assertTrue(response)
print("HA HE setup with normal response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_CONE_BINDING"])
response = self.ins.search_nat_cone_binding(
device=self.mock_device_ins,
return_mode="counter",
logical_system_name="root in",
security_profile_name="Default-Profile",
resources_used="0-10 in",
resources_reserved=("0-10", "in"),
resources_maximum=2097152,
re_name="node0",
)
self.assertEqual(response, 1)
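# match_from_previous_response=True presumably re-filters the reply cached by
# the previous call instead of issuing a new CLI command; with the re_name
# filter dropped, both node0 and node1 entries of the HA reply now match.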
print("search from previous result")
response = self.ins.search_nat_cone_binding(
device=self.mock_device_ins,
return_mode="counter",
match_from_previous_response=True,
logical_system_name="root in",
security_profile_name="Default-Profile",
resources_used="0-10 in",
resources_reserved=("0-10", "in"),
resources_maximum=2097152,
)
self.assertEqual(response, 2)
print("search from multiple LSYS")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_NAT_CONE_BINDING_MULTI_LSYS"])
response = self.ins.search_nat_cone_binding(
device=self.mock_device_ins,
return_mode="counter",
logical_system_name="LSYS in",
security_profile_name="SP2",
resources_used="0-10 in",
resources_reserved="0",
resources_maximum=2097152,
)
self.assertEqual(response, 1)
print("invalid option checking")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_LE_NAT_CONE_BINDING"])
response = self.ins.search_nat_cone_binding(
device=self.mock_device_ins,
unknown_option="root in",
)
self.assertFalse(response)
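# Note: an unrecognized match keyword apparently makes the search fail
# (return False) rather than raise, which is what the assertion above checks.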
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_nat_destination_pool(self, mock_execute_cli_command_on_device):
"""checking search nat destination pool"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_DESTINATION_POOL_SUMMARY"])
response = self.ins.search_nat_destination_pool(
device=self.mock_device_ins,
re_name="node0",
lightest_user=["root", "in"],
resources_maximum=8192,
total_profiles="1",
total_logical_systems=1,
)
self.assertTrue(response)
print("SA LE setup search by counter")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_HE_NAT_DESTINATION_POOL"])
response = self.ins.search_nat_destination_pool(
device=self.mock_device_ins,
return_mode="counter",
security_profile_name="Default-Profile",
resources_maximum=8192,
resources_used=0,
resources_reserved=0,
)
self.assertEqual(response, 1)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_nat_destination_rule(self, mock_execute_cli_command_on_device):
"""checking search nat destination rule"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_DESTINATION_RULE_SUMMARY"])
response = self.ins.search_nat_destination_rule(
device=self.mock_device_ins,
re_name="node0",
lightest_user=["root", "in"],
resources_maximum=8192,
total_profiles="1",
total_logical_systems=1,
)
self.assertTrue(response)
print("SA LE setup search by counter")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_DESTINATION_RULE_SUMMARY"])
response = self.ins.search_nat_destination_rule(
device=self.mock_device_ins,
return_mode="counter",
re_name="node0",
lightest_user=["root", "in"],
resources_maximum=8192,
total_profiles="1",
total_logical_systems=1,
)
self.assertEqual(response, 1)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_nat_interface_port_ol(self, mock_execute_cli_command_on_device):
"""checking search nat interface port ol"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_INTERFACE_PORT_OL"])
response = self.ins.search_nat_interface_port_ol(
device=self.mock_device_ins,
re_name="node0",
resources_maximum=32,
resources_used=64,
)
self.assertTrue(response)
print("HA_HE setup with summary by counter")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_INTERFACE_PORT_OL_SUMMARY"])
response = self.ins.search_nat_interface_port_ol(
device=self.mock_device_ins,
return_mode="counter",
re_name="node0",
lightest_user=["root", "in"],
resources_maximum=128,
total_profiles="1",
total_logical_systems=1,
)
self.assertEqual(response, 1)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_nat_nopat_address(self, mock_execute_cli_command_on_device):
"""checking search nat nopat address"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_NOPAT_ADDRESS"])
response = self.ins.search_nat_nopat_address(
device=self.mock_device_ins,
re_name="node0",
resources_maximum="10000-10485760 in",
resources_used=0,
)
self.assertTrue(response)
print("HA_HE setup with summary by counter")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_NOPAT_ADDRESS_SUMMARY"])
response = self.ins.search_nat_nopat_address(
device=self.mock_device_ins,
return_mode="counter",
re_name="node0",
lightest_user=["root", "in"],
resources_maximum=1048576,
total_profiles="1",
total_logical_systems=1,
)
self.assertEqual(response, 1)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_nat_pat_address(self, mock_execute_cli_command_on_device):
"""checking search nat pat address"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_PAT_ADDRESS"])
response = self.ins.search_nat_pat_address(
device=self.mock_device_ins,
re_name="node0",
resources_maximum="8192",
resources_used=0,
)
self.assertTrue(response)
print("HA_HE setup with summary by counter")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_PAT_ADDRESS_SUMMARY"])
response = self.ins.search_nat_pat_address(
device=self.mock_device_ins,
return_mode="counter",
re_name="node0",
lightest_user=["root", "in"],
resources_maximum=8192,
total_profiles="1",
total_logical_systems=1,
)
self.assertEqual(response, 1)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_nat_pat_portnum(self, mock_execute_cli_command_on_device):
"""checking search nat pat port"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_LE_NAT_PAT_PORTNUM"])
response = self.ins.search_nat_pat_portnum(
device=self.mock_device_ins,
re_name="node0",
resources_maximum="201326592",
resources_used=0,
)
self.assertTrue(response)
print("HA_HE setup with summary by counter")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_LE_NAT_PAT_PORTNUM_SUMMARY"])
response = self.ins.search_nat_pat_portnum(
device=self.mock_device_ins,
return_mode="counter",
re_name="node0",
lightest_user=["root", "in"],
resources_maximum=201326592,
total_profiles="1",
total_logical_systems=1,
)
self.assertEqual(response, 1)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_nat_port_ol_ipnumber(self, mock_execute_cli_command_on_device):
"""checking search nat pat address"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_PORT_OL_IPNUMBER"])
response = self.ins.search_nat_port_ol_ipnumber(
device=self.mock_device_ins,
re_name="node0",
resources_maximum="2",
resources_used=0,
)
self.assertTrue(response)
print("HA_HE setup with summary by counter")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_PORT_OL_IPNUMBER_SUMMARY"])
response = self.ins.search_nat_port_ol_ipnumber(
device=self.mock_device_ins,
return_mode="counter",
re_name="node0",
lightest_user=["root", "in"],
resources_maximum=2,
total_profiles="1",
total_logical_systems=1,
)
self.assertEqual(response, 1)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_nat_rule_referenced_prefix(self, mock_execute_cli_command_on_device):
"""checking search nat rule referenced prefix"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_RULE_REFERENCED_PREFIX"])
response = self.ins.search_nat_rule_referenced_prefix(
device=self.mock_device_ins,
re_name="node0",
resources_maximum="1048576",
resources_used=0,
)
self.assertTrue(response)
print("HA_HE setup with summary by counter")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_RULE_REFERENCED_PREFIX_SUMMARY"])
response = self.ins.search_nat_rule_referenced_prefix(
device=self.mock_device_ins,
return_mode="counter",
re_name="node0",
lightest_user=["root", "in"],
resources_maximum=1048576,
total_profiles="1",
total_logical_systems=1,
)
self.assertEqual(response, 1)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_nat_source_pool(self, mock_execute_cli_command_on_device):
"""checking search nat source pool"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_SOURCE_POOL"])
response = self.ins.search_nat_source_pool(
device=self.mock_device_ins,
re_name="node0",
resources_maximum="8192",
resources_used=0,
)
self.assertTrue(response)
print("HA_HE setup with summary by counter")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_SOURCE_POOL_SUMMARY"])
response = self.ins.search_nat_source_pool(
device=self.mock_device_ins,
return_mode="counter",
re_name="node0",
lightest_user=["root", "in"],
resources_maximum=8192,
total_profiles="1",
total_logical_systems=1,
)
self.assertEqual(response, 1)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_nat_source_rule(self, mock_execute_cli_command_on_device):
"""checking search nat source rule"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_SOURCE_RULE"])
response = self.ins.search_nat_source_rule(
device=self.mock_device_ins,
re_name="node0",
resources_maximum="8192",
resources_used=0,
)
self.assertTrue(response)
print("HA_HE setup with summary by counter")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_SOURCE_RULE_SUMMARY"])
response = self.ins.search_nat_source_rule(
device=self.mock_device_ins,
return_mode="counter",
re_name="node0",
lightest_user=["root", "in"],
resources_maximum=8192,
total_profiles="1",
total_logical_systems=1,
)
self.assertEqual(response, 1)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_nat_static_rule(self, mock_execute_cli_command_on_device):
"""checking search nat static rule"""
print("HA HE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_STATIC_RULE"])
response = self.ins.search_nat_static_rule(
device=self.mock_device_ins,
re_name="node0",
resources_maximum="8192",
resources_used=0,
)
self.assertTrue(response)
print("HA_HE setup with summary by counter")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["HA_HE_NAT_STATIC_RULE_SUMMARY"])
response = self.ins.search_nat_static_rule(
device=self.mock_device_ins,
return_mode="counter",
re_name="node0",
lightest_user=["root", "in"],
resources_maximum=8192,
total_profiles="1",
total_logical_systems=1,
)
self.assertEqual(response, 1)
@mock.patch.object(dev, "execute_cli_command_on_device")
def test_search_security_log_stream_number(self, mock_execute_cli_command_on_device):
"""checking search security log stream number"""
print("SA LE setup with summary response")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_LE_LOG_STREAM_NUMBER"])
response = self.ins.search_security_log_stream_number(
device=self.mock_device_ins,
security_profile_name="Default-Profile",
resources_maximum="3",
resources_used=0,
)
self.assertTrue(response)
print("SA LE setup with summary by counter")
mock_execute_cli_command_on_device.return_value = self.xml.xml_string_to_dict(self.response["SA_LE_LOG_STREAM_NUMBER_SUMMARY"])
response = self.ins.search_security_log_stream_number(
device=self.mock_device_ins,
return_mode="counter",
lightest_user=["root", "in"],
resources_maximum=32,
total_profiles="0",
total_logical_systems=2,
)
self.assertEqual(response, 1)
| [
"[email protected]"
]
| |
143849de7018d4928b5e73e2759cfa84e3317b7e | be9c2b431a32908895bed448a30227e45e3d1eb8 | /data/tia_augmentation.py | bd00a409a69de26f381ab2ded216b8d1a6a131a0 | []
| no_license | songbae/p4-fr-ocr-oriental-chicken-curry | 5e2dc947d7d6114d80346007f7d79dc6ab126b8a | 8c1a0955ff97dbd24eed78c17cb9ccd44b86a47b | refs/heads/main | 2023-05-31T05:11:22.035653 | 2021-06-16T19:26:26 | 2021-06-16T19:26:26 | 377,679,712 | 1 | 0 | null | 2021-06-17T02:14:44 | 2021-06-17T02:14:43 | null | UTF-8 | Python | false | false | 9,344 | py | # https://github.com/PaddlePaddle/PaddleOCR/tree/release/2.1/ppocr/data/imaug/text_image_aug
import cv2
import numpy as np
from albumentations.core.transforms_interface import ImageOnlyTransform
class WarpMLS:
    """Moving Least Squares (MLS) image warp used by the TIA augmentations.

    Displacements mapping ``dst_pts`` back toward ``src_pts`` are computed on a
    coarse grid (``grid_size``) and bilinearly interpolated for the pixels in
    between; ``trans_ratio`` scales the strength of the warp.
    """
def __init__(self, src, src_pts, dst_pts, dst_w, dst_h, trans_ratio=1.):
self.src = src
self.src_pts = src_pts
self.dst_pts = dst_pts
self.pt_count = len(self.dst_pts)
self.dst_w = dst_w
self.dst_h = dst_h
self.trans_ratio = trans_ratio
self.grid_size = 100
self.rdx = np.zeros((self.dst_h, self.dst_w))
self.rdy = np.zeros((self.dst_h, self.dst_w))
@staticmethod
def __bilinear_interp(x, y, v11, v12, v21, v22):
return (v11 * (1 - y) + v12 * y) * (1 - x) + (v21 *
(1 - y) + v22 * y) * x
def generate(self):
self.calc_delta()
return self.gen_img()
def calc_delta(self):
w = np.zeros(self.pt_count, dtype=np.float32)
if self.pt_count < 2:
return
i = 0
while 1:
if self.dst_w <= i < self.dst_w + self.grid_size - 1:
i = self.dst_w - 1
elif i >= self.dst_w:
break
j = 0
while 1:
if self.dst_h <= j < self.dst_h + self.grid_size - 1:
j = self.dst_h - 1
elif j >= self.dst_h:
break
sw = 0
swp = np.zeros(2, dtype=np.float32)
swq = np.zeros(2, dtype=np.float32)
new_pt = np.zeros(2, dtype=np.float32)
cur_pt = np.array([i, j], dtype=np.float32)
k = 0
for k in range(self.pt_count):
if i == self.dst_pts[k][0] and j == self.dst_pts[k][1]:
break
w[k] = 1. / (
(i - self.dst_pts[k][0]) * (i - self.dst_pts[k][0]) +
(j - self.dst_pts[k][1]) * (j - self.dst_pts[k][1]))
sw += w[k]
swp = swp + w[k] * np.array(self.dst_pts[k])
swq = swq + w[k] * np.array(self.src_pts[k])
if k == self.pt_count - 1:
pstar = 1 / sw * swp
qstar = 1 / sw * swq
miu_s = 0
for k in range(self.pt_count):
if i == self.dst_pts[k][0] and j == self.dst_pts[k][1]:
continue
pt_i = self.dst_pts[k] - pstar
miu_s += w[k] * np.sum(pt_i * pt_i)
cur_pt -= pstar
cur_pt_j = np.array([-cur_pt[1], cur_pt[0]])
for k in range(self.pt_count):
if i == self.dst_pts[k][0] and j == self.dst_pts[k][1]:
continue
pt_i = self.dst_pts[k] - pstar
pt_j = np.array([-pt_i[1], pt_i[0]])
tmp_pt = np.zeros(2, dtype=np.float32)
tmp_pt[0] = np.sum(pt_i * cur_pt) * self.src_pts[k][0] - \
np.sum(pt_j * cur_pt) * self.src_pts[k][1]
tmp_pt[1] = -np.sum(pt_i * cur_pt_j) * self.src_pts[k][0] + \
np.sum(pt_j * cur_pt_j) * self.src_pts[k][1]
tmp_pt *= (w[k] / miu_s)
new_pt += tmp_pt
new_pt += qstar
else:
new_pt = self.src_pts[k]
self.rdx[j, i] = new_pt[0] - i
self.rdy[j, i] = new_pt[1] - j
j += self.grid_size
i += self.grid_size
def gen_img(self):
src_h, src_w = self.src.shape[:2]
dst = np.zeros_like(self.src, dtype=np.float32)
for i in np.arange(0, self.dst_h, self.grid_size):
for j in np.arange(0, self.dst_w, self.grid_size):
ni = i + self.grid_size
nj = j + self.grid_size
w = h = self.grid_size
if ni >= self.dst_h:
ni = self.dst_h - 1
h = ni - i + 1
if nj >= self.dst_w:
nj = self.dst_w - 1
w = nj - j + 1
di = np.reshape(np.arange(h), (-1, 1))
dj = np.reshape(np.arange(w), (1, -1))
delta_x = self.__bilinear_interp(
di / h, dj / w, self.rdx[i, j], self.rdx[i, nj],
self.rdx[ni, j], self.rdx[ni, nj])
delta_y = self.__bilinear_interp(
di / h, dj / w, self.rdy[i, j], self.rdy[i, nj],
self.rdy[ni, j], self.rdy[ni, nj])
nx = j + dj + delta_x * self.trans_ratio
ny = i + di + delta_y * self.trans_ratio
nx = np.clip(nx, 0, src_w - 1)
ny = np.clip(ny, 0, src_h - 1)
nxi = np.array(np.floor(nx), dtype=np.int32)
nyi = np.array(np.floor(ny), dtype=np.int32)
nxi1 = np.array(np.ceil(nx), dtype=np.int32)
nyi1 = np.array(np.ceil(ny), dtype=np.int32)
if len(self.src.shape) == 3:
x = np.tile(np.expand_dims(ny - nyi, axis=-1), (1, 1, 3))
y = np.tile(np.expand_dims(nx - nxi, axis=-1), (1, 1, 3))
else:
x = ny - nyi
y = nx - nxi
dst[i:i + h, j:j + w] = self.__bilinear_interp(
x, y, self.src[nyi, nxi], self.src[nyi, nxi1],
self.src[nyi1, nxi], self.src[nyi1, nxi1])
dst = np.clip(dst, 0, 255)
dst = np.array(dst, dtype=np.uint8)
return dst
def tia_distort(src, segment=4):
    """Randomly jitter the four corners and the vertical segment boundaries of the image."""
img_h, img_w = src.shape[:2]
cut = img_w // segment
thresh = cut // 3
src_pts = list()
dst_pts = list()
src_pts.append([0, 0])
src_pts.append([img_w, 0])
src_pts.append([img_w, img_h])
src_pts.append([0, img_h])
dst_pts.append([np.random.randint(thresh), np.random.randint(thresh)])
dst_pts.append(
[img_w - np.random.randint(thresh), np.random.randint(thresh)])
dst_pts.append(
[img_w - np.random.randint(thresh), img_h - np.random.randint(thresh)])
dst_pts.append(
[np.random.randint(thresh), img_h - np.random.randint(thresh)])
half_thresh = thresh * 0.5
for cut_idx in np.arange(1, segment, 1):
src_pts.append([cut * cut_idx, 0])
src_pts.append([cut * cut_idx, img_h])
dst_pts.append([
cut * cut_idx + np.random.randint(thresh) - half_thresh,
np.random.randint(thresh) - half_thresh
])
dst_pts.append([
cut * cut_idx + np.random.randint(thresh) - half_thresh,
img_h + np.random.randint(thresh) - half_thresh
])
trans = WarpMLS(src, src_pts, dst_pts, img_w, img_h)
dst = trans.generate()
return dst
def tia_stretch(src, segment=4):
    """Randomly shift the interior vertical segment boundaries horizontally, stretching the content."""
img_h, img_w = src.shape[:2]
cut = img_w // segment
thresh = cut * 4 // 5
src_pts = list()
dst_pts = list()
src_pts.append([0, 0])
src_pts.append([img_w, 0])
src_pts.append([img_w, img_h])
src_pts.append([0, img_h])
dst_pts.append([0, 0])
dst_pts.append([img_w, 0])
dst_pts.append([img_w, img_h])
dst_pts.append([0, img_h])
half_thresh = thresh * 0.5
for cut_idx in np.arange(1, segment, 1):
move = np.random.randint(thresh) - half_thresh
src_pts.append([cut * cut_idx, 0])
src_pts.append([cut * cut_idx, img_h])
dst_pts.append([cut * cut_idx + move, 0])
dst_pts.append([cut * cut_idx + move, img_h])
trans = WarpMLS(src, src_pts, dst_pts, img_w, img_h)
dst = trans.generate()
return dst
def tia_perspective(src):
    """Randomly raise or lower the top and bottom edges to mimic a small perspective change."""
img_h, img_w = src.shape[:2]
thresh = img_h // 2
src_pts = list()
dst_pts = list()
src_pts.append([0, 0])
src_pts.append([img_w, 0])
src_pts.append([img_w, img_h])
src_pts.append([0, img_h])
dst_pts.append([0, np.random.randint(thresh)])
dst_pts.append([img_w, np.random.randint(thresh)])
dst_pts.append([img_w, img_h - np.random.randint(thresh)])
dst_pts.append([0, img_h - np.random.randint(thresh)])
trans = WarpMLS(src, src_pts, dst_pts, img_w, img_h)
dst = trans.generate()
return dst
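# A minimal usage sketch (not part of the original PaddleOCR module; the image
# path below is hypothetical). It applies the three TIA warps defined above to
# one BGR image loaded with OpenCV and returns the augmented copies.
def _tia_demo(image_path="sample.png"):
    img = cv2.imread(image_path)
    if img is None:
        raise FileNotFoundError(image_path)
    return (tia_distort(img, segment=4),
            tia_stretch(img, segment=4),
            tia_perspective(img))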
class TIADistortion(ImageOnlyTransform):
def __init__(self, segment=4, always_apply=False, p=0.5):
super().__init__(always_apply=always_apply, p=p)
self.segment = segment
def apply(self, img, **params):
return tia_distort(img, self.segment)
class TIAStretch(ImageOnlyTransform):
def __init__(self, segment=4, always_apply=False, p=0.5):
super().__init__(always_apply=always_apply, p=p)
self.segment = segment
def apply(self, img, **params):
return tia_stretch(img, self.segment)
class TIAPerspective(ImageOnlyTransform):
def __init__(self, always_apply=False, p=0.5):
super().__init__(always_apply=always_apply, p=p)
def apply(self, img, **params):
return tia_perspective(img) | [
"[email protected]"
]
| |
3a2fbdf9a60161ddf6d5cf18989eb64852a88458 | 28837c1d50c453fa759f6df623e31626dc38002c | /uproot/_help.py | 2e7e981989505870ac3ec39bff6ec97af9480fca | [
"BSD-3-Clause"
]
| permissive | henryiii/uproot | 7118ac464ce0e944682ddcdaf485c8f4a0401ded | 28b5331658aa55c69a587535b34de795abfea966 | refs/heads/master | 2020-03-25T04:28:35.594485 | 2018-07-28T10:27:19 | 2018-07-28T10:27:19 | 143,397,052 | 0 | 0 | null | 2018-08-03T07:59:17 | 2018-08-03T07:59:16 | null | UTF-8 | Python | false | false | 94,033 | py | #!/usr/bin/env python
# Copyright (c) 2017, DIANA-HEP
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import uproot
import uproot._connect.to_pandas
def _method(x):
if hasattr(x, "__func__"):
return x.__func__
else:
return x
################################################################ uproot.rootio fragments
open_fragments = {
# localsource
"localsource": u"""localsource : function: path \u21d2 :py:class:`Source <uproot.source.source.Source> or ``dict`` of keyword arguments`
function that will be applied to the path to produce an uproot :py:class:`Source <uproot.source.source.Source>` object if the path is a local file. Default is :py:meth:`MemmapSource.defaults <uproot.source.memmap.MemmapSource.defaults>` for memory-mapped files. If a ``dict``, the ``dict`` is passed as keyword arguments to :py:class:`MemmapSource <uproot.source.memmap.MemmapSource>` constructor.""",
# xrootdsource
"xrootdsource": u"""xrootdsource : function: path \u21d2 :py:class:`Source <uproot.source.source.Source> or ``dict`` of keyword arguments`
function that will be applied to the path to produce an uproot :py:class:`Source <uproot.source.source.Source>` object if the path is an XRootD URL. Default is :py:meth:`XRootDSource.defaults <uproot.source.xrootd.XRootDSource.defaults>` for XRootD with default chunk size/caching. (See :py:class:`XRootDSource <uproot.source.xrootd.XRootDSource>` constructor for details.) If a ``dict``, the ``dict`` is passed as keyword arguments to :py:class:`XRootDSource <uproot.source.xrootd.XRootDSource>` constructor.""",
# options
"options": u"""**options
passed to :py:class:`ROOTDirectory <uproot.rootio.ROOTDirectory>` constructor.""",
}
rootdirectory_fragments = {
# recursive
"recursive": u"""recursive : bool
if ``False`` *(default)*, only iterate over this directory level; if ``True``, depth-first iterate over all subdirectories as well.""",
# filtername
"filtername": u"""filtername : function: str \u21d2 bool
only keys for which ``filtername(name)`` returns ``True`` are returned (does not eliminate subdirectories if ``recursive=True``). Default returns ``True`` for all input.""",
# filterclass
"filterclass": u"""filterclass : function: class object \u21d2 bool
only keys for which ``filterclass(class object)`` returns ``True`` are returned (does not eliminate subdirectories if ``recursive=True``). Default returns ``True`` for all input. Note that all class objects passed to this function have a ``classname`` attribute for the C++ class name (may differ from the Python class name for syntactic reasons).""",
}
################################################################ uproot.rootio.open
uproot.rootio.open.__doc__ = \
u"""Opens a ROOT file (local or remote), specified by file path.
Parameters
----------
path : str
        local file path or URL specifying the location of a file (note: not a Python file object!). If the URL scheme is "root://", :py:func:`xrootd <uproot.xrootd>` will be called.
{localsource}
{xrootdsource}
{options}
Returns
-------
:py:class:`ROOTDirectory <uproot.rootio.ROOTDirectory>`
top-level directory of the ROOT file.
Notes
-----
The ROOTDirectory returned by this function is not necessarily an open file. File handles are managed internally by :py:class:`Source <uproot.source.source.Source>` objects to permit parallel reading. Although this function can be used in a ``with`` construct (which protects against unclosed files), the ``with`` construct has no meaning when applied to this function. Files will be opened or closed as needed to read data on demand.
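    Examples
    --------
    A minimal sketch (the file and object names here are hypothetical)::

        import uproot
        directory = uproot.open("myfile.root")
        tree = directory["events"]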
""".format(**open_fragments)
################################################################ uproot.rootio.xrootd
uproot.rootio.xrootd.__doc__ = \
u"""Opens a remote ROOT file with XRootD (if installed).
Parameters
----------
path : str
URL specifying the location of a file.
{xrootdsource}
{options}
Returns
-------
:py:class:`ROOTDirectory <uproot.rootio.ROOTDirectory>`
top-level directory of the ROOT file.
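    Examples
    --------
    A minimal sketch (the URL is hypothetical)::

        import uproot
        directory = uproot.xrootd("root://server.example//path/to/file.root")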
""".format(**open_fragments)
################################################################ uproot.rootio.ROOTDirectory
uproot.rootio.ROOTDirectory.__doc__ = \
u"""Represents a ROOT file or directory, an entry point for reading objects.
Although this class has a constructor that could be called by a user, objects are usually created from ROOT files through :py:func:`open <uproot.rootio.open>` or :py:func:`xrootd <uproot.rootio.xrootd>`.
:py:class:`ROOTDirectory <uproot.rootio.ROOTDirectory>` objects may be accessed as Python containers:
- square brackets (``__getitem__``) read objects from the file by key name (see :py:meth:`get <uproot.rootio.ROOTDirectory.get>`).
- the ``len`` function (``__len__``) returns the number of keys.
- iteration (``__iter__``) iterates over the *names* of the keys only (like a ``dict``, see :py:meth:`keys <uproot.rootio.ROOTDirectory.keys>`).
**Attributes, properties, and methods:**
- **name** (*bytes*) name of the file or directory *as read from the ROOT file*. (ROOT files may be imprinted with a different name than they have in the file system.)
- **compression** (:py:class:`Compression <uproot.source.compressed.Compression>`) the compression algorithm and level specified in the file header. (Some objects, including TTree branches, may have different compression settings than the global file settings.)
- :py:meth:`get <uproot.rootio.ROOTDirectory.get>` read an object from the file, selected by name.
- :py:meth:`iterkeys <uproot.rootio.ROOTDirectory.iterkeys>` iterate over key names in this directory.
- :py:meth:`itervalues <uproot.rootio.ROOTDirectory.itervalues>` iterate over objects in this directory.
- :py:meth:`iteritems <uproot.rootio.ROOTDirectory.iteritems>` iterate over *(key name, object)* pairs in this directory, like a ``dict``.
- :py:meth:`iterclasses <uproot.rootio.ROOTDirectory.iterclasses>` iterate over *(key name, class object)* pairs in this directory.
- :py:meth:`keys <uproot.rootio.ROOTDirectory.keys>` return key names in this directory.
- :py:meth:`values <uproot.rootio.ROOTDirectory.values>` return objects in this directory.
- :py:meth:`items <uproot.rootio.ROOTDirectory.items>` return *(key name, object)* pairs in this directory, like a ``dict``.
- :py:meth:`classes <uproot.rootio.ROOTDirectory.classes>` return *(key name, class object)* pairs in this directory.
- :py:meth:`allkeys <uproot.rootio.ROOTDirectory.allkeys>` return keys at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys <uproot.rootio.ROOTDirectory.keys>`).
- :py:meth:`allvalues <uproot.rootio.ROOTDirectory.allvalues>` return objects at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values <uproot.rootio.ROOTDirectory.values>`).
- :py:meth:`allitems <uproot.rootio.ROOTDirectory.allitems>` return *(key name, object)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items <uproot.rootio.ROOTDirectory.items>`).
- :py:meth:`allclasses <uproot.rootio.ROOTDirectory.allclasses>` return *(key name, class object)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`classes <uproot.rootio.ROOTDirectory.classes>`).
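    **Example:**

    A container-style sketch (file and key names are hypothetical)::

        directory = uproot.open("myfile.root")
        for name in directory.keys():
            print(name)
        histogram = directory["hist1"]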
"""
_method(uproot.rootio.ROOTDirectory.get).__doc__ = \
u"""Read an object from the ROOT file or directory by name.
Parameters
----------
    name : str
        name of the object. Any text before a "``/``" is interpreted as a subdirectory, and subdirectories of any depth may be searched. A number after a "``;``" indicates a :py:class:`TKey <uproot.rootio.TKey>` cycle.
    cycle : ``None`` or int
        :py:class:`TKey <uproot.rootio.TKey>` cycle number to disambiguate keys of the same name. This argument overrides a number after a "``;``".
Returns
-------
:py:class:`ROOTStreamedObject <uproot.rootio.ROOTStreamedObject>`
a freshly read object from the ROOT file.
Notes
-----
This method, without the ``cycle`` argument, can be accessed more directly through square brackets (``__getitem__``) on the :py:class:`ROOTDirectory <uproot.rootio.ROOTDirectory>` object.
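    Examples
    --------
    A minimal sketch (the key names are hypothetical)::

        obj = directory.get("subdir/hist1")
        same = directory["subdir/hist1"]   # equivalent access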
""".format(**rootdirectory_fragments)
_method(uproot.rootio.ROOTDirectory.iterkeys).__doc__ = \
u"""Iterate over key names in this directory.
This method does not read objects.
Parameters
----------
{recursive}
{filtername}
{filterclass}
Returns
-------
iterator over bytes
names of objects and subdirectories in the file.
Notes
-----
This method can be accessed more directly by simply iterating over a :py:class:`ROOTDirectory <uproot.rootio.ROOTDirectory>` object.
""".format(**rootdirectory_fragments)
_method(uproot.rootio.ROOTDirectory.itervalues).__doc__ = \
u"""Iterate over objects in this directory.
Parameters
----------
{recursive}
{filtername}
{filterclass}
Returns
-------
iterator over :py:class:`ROOTStreamedObject <uproot.rootio.ROOTStreamedObject>`
freshly read objects from the ROOT file.
""".format(**rootdirectory_fragments)
_method(uproot.rootio.ROOTDirectory.iteritems).__doc__ = \
u"""Iterate over *(key name, object)* pairs in this directory, like a ``dict``.
Parameters
----------
{recursive}
{filtername}
{filterclass}
Returns
-------
iterator over (bytes, :py:class:`ROOTStreamedObject <uproot.rootio.ROOTStreamedObject>`)
name-object pairs from the file.
""".format(**rootdirectory_fragments)
_method(uproot.rootio.ROOTDirectory.iterclasses).__doc__ = \
u"""Iterate over *(key name, class object)* pairs in this directory.
This method does not read objects.
Parameters
----------
{recursive}
{filtername}
{filterclass}
Returns
-------
iterator over (bytes, class object)
name-class object pairs from the file.
""".format(**rootdirectory_fragments)
_method(uproot.rootio.ROOTDirectory.keys).__doc__ = \
u"""Return key names in this directory.
This method does not read objects.
Parameters
----------
{recursive}
{filtername}
{filterclass}
Returns
-------
list of bytes
names of objects and subdirectories in the file.
""".format(**rootdirectory_fragments)
_method(uproot.rootio.ROOTDirectory.values).__doc__ = \
u"""Return objects in this directory.
Parameters
----------
{recursive}
{filtername}
{filterclass}
Returns
-------
list of :py:class:`ROOTStreamedObject <uproot.rootio.ROOTStreamedObject>`
freshly read objects from the ROOT file.
""".format(**rootdirectory_fragments)
_method(uproot.rootio.ROOTDirectory.items).__doc__ = \
u"""Return *(key name, object)* pairs in this directory, like a ``dict``.
Parameters
----------
{recursive}
{filtername}
{filterclass}
Returns
-------
list of (bytes, :py:class:`ROOTStreamedObject <uproot.rootio.ROOTStreamedObject>`)
name-object pairs from the file.
""".format(**rootdirectory_fragments)
_method(uproot.rootio.ROOTDirectory.classes).__doc__ = \
u"""Return *(key name, class object)* pairs in this directory.
This method does not read objects.
Parameters
----------
{recursive}
{filtername}
{filterclass}
Returns
-------
list of (bytes, class object)
name-class object pairs from the file.
""".format(**rootdirectory_fragments)
_method(uproot.rootio.ROOTDirectory.allkeys).__doc__ = \
u"""Return keys at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys <uproot.rootio.ROOTDirectory.keys>`).
This method does not read objects.
Parameters
----------
{filtername}
{filterclass}
Returns
-------
list of bytes
names of objects and subdirectories in the file.
""".format(**rootdirectory_fragments)
_method(uproot.rootio.ROOTDirectory.allvalues).__doc__ = \
u"""Return objects at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values <uproot.rootio.ROOTDirectory.values>`).
Parameters
----------
{filtername}
{filterclass}
Returns
-------
list of :py:class:`ROOTStreamedObject <uproot.rootio.ROOTStreamedObject>`
freshly read objects from the ROOT file.
""".format(**rootdirectory_fragments)
_method(uproot.rootio.ROOTDirectory.allitems).__doc__ = \
u"""Return *(key name, object)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items <uproot.rootio.ROOTDirectory.items>`).
Parameters
----------
{filtername}
{filterclass}
Returns
-------
list of (bytes, :py:class:`ROOTStreamedObject <uproot.rootio.ROOTStreamedObject>`)
name-object pairs from the file.
""".format(**rootdirectory_fragments)
_method(uproot.rootio.ROOTDirectory.allclasses).__doc__ = \
u"""Return *(key name, class object)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`classes <uproot.rootio.ROOTDirectory.classes>`).
This method does not read objects.
Parameters
----------
{filtername}
{filterclass}
Returns
-------
list of (bytes, class object)
name-class object pairs from the file.
""".format(**rootdirectory_fragments)
################################################################ uproot.rootio.ROOTObject and uproot.rootio.ROOTStreamedObject
uproot.rootio.ROOTObject.__doc__ = \
u"""Superclass of all objects read out of a ROOT file (except :py:class:`ROOTDirectory <uproot.rootio.ROOTDirectory>`).
If a :py:class:`ROOTObject <uproot.rootio.ROOTObject>` is not a :py:class:`ROOTStreamedObject <uproot.rootio.ROOTStreamedObject>`, then its class definition is hard-coded, not derived from the file's *streamer info*.
"""
uproot.rootio.ROOTStreamedObject.__doc__ = \
u"""Superclass of all objects read out of a ROOT file with an automatically generated class, derived from the file's *streamer info*.
Each subclass of a :py:class:`ROOTStreamedObject <uproot.rootio.ROOTStreamedObject>` has a ``classversion`` attribute, corresponding to the class version in the *streamer info*. If this version does not match the version of the serialized class, an error is raised during the read.
"""
################################################################ uproot.tree fragments
tree_fragments = {
# entrystart
"entrystart": u"""entrystart : ``None`` or int
entry at which reading starts (inclusive). If ``None`` *(default)*, start at the beginning of the branch.""",
# entrystop
"entrystop": u"""entrystop : ``None`` or int
entry at which reading stops (exclusive). If ``None`` *(default)*, stop at the end of the branch.""",
# entrysteps
"entrysteps": u"""entrysteps : ``None``, positive int, or iterable of *(int, int)* pairs
if ``None`` *(default)*, iterate in steps of TTree clusters (number of entries for which all branches' baskets align); if an integer, iterate in steps of equal numbers of entries (except at the end of a file); otherwise, iterate in explicit, user-specified *(start, stop)* intervals ("start" is inclusive and "stop" is exclusive).""",
# branch
"branch": u"""branch : str
name of the branch to read.""",
# interpretation
"interpretation": u"""interpretation : ``None`` or :py:class:`Interpretation <uproot.interp.interp.Interpretation>`
the meaning imposed upon the bytes of the file and the ultimate form to instantiate. If ``None`` *(default)*, :py:func:`interpret <uproot.interp.auto.interpret>` will be applied to the branch to generate an interpretation.""",
# branches
"branches": u"""branches
- if ``None`` *(default)*, select all *interpretable* branches;
- if a function :py:class:`TBranchMethods <uproot.tree.TBranchMethods>` \u21d2 ``None`` or :py:class:`Interpretation <uproot.interp.interp.Interpretation>`, select branches for which the function does not return ``None`` and use the interpretation it returns otherwise;
- if a ``dict`` of str \u2192 :py:class:`Interpretation <uproot.interp.interp.Interpretation>`, select branches named by keys and use interpretations from the associated values;
- if a list of str, select branches by name;
- if a single str, select a single branch. The selection by string can include filename-like glob characters (``*``, ``?``, ``[...]``) or it can be a full regular expression (Python flavored) if surrounded by slashes, like ``/pattern/i`` (where ``i`` is an optional `Python re flag <https://docs.python.org/2/library/re.html>`_).""",
# outputtype
"outputtype": u"""outputtype : type
constructor for the desired yield type, such as ``dict`` *(default)*, ``OrderedDict``, ``tuple``, ``namedtuple``, custom user class, etc.""",
# reportentries
"reportentries": u"""reportentries : bool
if ``False`` *(default)*, yield only arrays (as ``outputtype``); otherwise, yield 3-tuple: *(entry start, entry stop, arrays)*, where *entry start* is inclusive and *entry stop* is exclusive.""",
# flatten
"flatten": u"""flatten : bool
if ``True`` *(not default)*, convert JaggedArrays into flat Numpy arrays.""",
# cache
"cache": u"""cache : ``None`` or ``dict``-like object
if not ``None`` *(default)*, fully interpreted arrays will be saved in the ``dict``-like object for later use. Accessing the same arrays with a different interpretation or a different entry range results in a cache miss.""",
# basketcache
"basketcache": u"""basketcache : ``None`` or ``dict``-like object
if not ``None`` *(default)*, raw basket data will be saved in the ``dict``-like object for later use. Accessing the same arrays with a different interpretation or a different entry range fully utilizes this cache, since the interpretation/construction from baskets is performed after retrieving data from this cache.""",
# keycache
"keycache": u"""keycache : ``None`` or ``dict``-like object
if not ``None`` *(default)*, basket TKeys will be saved in the ``dict``-like object for later use. TKeys are small, but require file access, so caching them can speed up repeated access.""",
# executor
"executor": u"""executor : `concurrent.futures.Executor <https://docs.python.org/3/library/concurrent.futures.html>`_
if not ``None`` *(default)*, parallelize basket-reading and decompression by scheduling tasks on the executor. Assumes caches are thread-safe.""",
# blocking
"blocking": u"""blocking : bool
if ``True`` *(default)*, do not exit this function until the arrays are read, and return those arrays. If ``False``, exit immediately and return a zero-argument function. That zero-argument function returns the desired array, and it blocks until the array is available. This option is only useful with a non-``None`` executor.""",
# recursive
"recursive": u"""recursive : bool
if ``False`` *(default)*, only iterate at this tree/branch level; if ``True``, depth-first iterate over all subbranches as well.""",
# filtername
"filtername": u"""filtername : function: str \u21d2 bool
only branches for which ``filtername(name)`` returns ``True`` are returned. Default returns ``True`` for all input.""",
# filtertitle
"filtertitle": u"""filtertitle : function: str \u21d2 bool
only branches for which ``filtertitle(title)`` returns ``True`` are returned. Default returns ``True`` for all input.""",
# i
"i": u"""i : non-negative int
basket number (must be greater than or equal to zero and strictly less than *numbaskets*)."""
}
################################################################ uproot.tree.iterate
uproot.tree.iterate.__doc__ = \
u"""Opens a series of ROOT files (local or remote), yielding the same number of entries from all selected branches in each step.
Depending on the "entrysteps" parameter, the number of entries in one step may differ from the number of entries in the next step, but in every step, the same number of entries is retrieved from all *baskets.*
All but the first two parameters are identical to :py:meth:`uproot.tree.TreeMethods.iterate`.
Parameters
----------
path : str or list of str
glob pattern(s) for local file paths (POSIX wildcards like "``*``") or URLs specifying the locations of the files. A list of filenames are processed in the given order, but glob patterns get pre-sorted to ensure a predictable order.
treepath : str
path within each ROOT file to find the TTree (may include "``/``" for subdirectories or "``;``" for cycle numbers).
{branches}
{entrysteps}
{outputtype}
{reportentries}
{flatten}
{cache}
{basketcache}
{keycache}
{executor}
{blocking}
{localsource}
{xrootdsource}
{options}
Returns
-------
iterator over (int, int, outputtype) (if *reportentries*) or just outputtype (otherwise)
aligned array segments from the files.
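    Examples
    --------
    A minimal sketch (file, tree, and branch names are hypothetical)::

        import uproot
        for arrays in uproot.iterate("data*.root", "events", ["px", "py"]):
            pass  # process one aligned chunk of arrays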
""".format(**dict(list(open_fragments.items()) + list(tree_fragments.items())))
################################################################ uproot.tree.TTreeMethods
uproot.tree.TTreeMethods.__doc__ = \
u"""Adds array reading methods to TTree objects that have been streamed from a ROOT file.
- square brackets (``__getitem__``) returns a branch by name (see :py:meth:`get <uproot.tree.TTreeMethods.get>`).
- the ``len`` function (``__len__``) returns the number of entries (same as ``numentries``).
- iteration (``__iter__``) has no implementation. This is to avoid confusion between iterating over all branches (probably not what you want, but fitting the pattern set by :py:class:`ROOTDirectory <uproot.rootio.ROOTDirectory>` and ``dict``) and iterating over the data.
**Attributes, properties, and methods:**
- **name** (*bytes*) name of the TTree.
- **title** (*bytes*) title of the TTree.
- **numentries** (*int*) number of entries in the TTree (same as ``len``).
- **pandas** connector to `Pandas <http://pandas.pydata.org/>`_ functions
- :py:meth:`get <uproot.tree.TTreeMethods.get>` return a branch by name (at any level of depth).
- :py:meth:`iterkeys <uproot.tree.TTreeMethods.iterkeys>` iterate over branch names.
- :py:meth:`itervalues <uproot.tree.TTreeMethods.itervalues>` iterate over branches.
- :py:meth:`iteritems <uproot.tree.TTreeMethods.iteritems>` iterate over *(branch name, branch)* pairs.
- :py:meth:`keys <uproot.tree.TTreeMethods.keys>` return branch names.
- :py:meth:`values <uproot.tree.TTreeMethods.values>` return branches.
- :py:meth:`items <uproot.tree.TTreeMethods.items>` return *(branch name, branch)* pairs.
- :py:meth:`allkeys <uproot.tree.TTreeMethods.allkeys>` return branch names at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys <uproot.tree.TTreeMethods.keys>`).
- :py:meth:`allvalues <uproot.tree.TTreeMethods.allvalues>` return branches at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values <uproot.tree.TTreeMethods.values>`).
- :py:meth:`allitems <uproot.tree.TTreeMethods.allitems>` return *(branch name, branch)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items <uproot.tree.TTreeMethods.items>`).
- :py:meth:`clusters <uproot.tree.TTreeMethods.clusters>` iterate over *(int, int)* pairs representing cluster entry starts and stops in this TTree *(not implemented)*.
**Methods for reading array data:**
- :py:meth:`array <uproot.tree.TTreeMethods.array>` read one branch into an array (or other object if provided an alternate *interpretation*).
- :py:meth:`lazyarray <uproot.tree.TTreeMethods.lazyarray>` create a lazy array that would read the branch as needed.
- :py:meth:`arrays <uproot.tree.TTreeMethods.arrays>` read many branches into arrays (or other objects if provided alternate *interpretations*).
- :py:meth:`lazyarrays <uproot.tree.TTreeMethods.lazyarrays>` create many lazy arrays.
- :py:meth:`iterate <uproot.tree.TTreeMethods.iterate>` iterate over many arrays at once, yielding the same number of entries from all selected branches in each step.
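    **Example:**

    A typical reading sketch (tree and branch names are hypothetical)::

        tree = uproot.open("myfile.root")["events"]
        px = tree.array("px")
        both = tree.arrays(["px", "py"])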
"""
_method(uproot.tree.TTreeMethods.get).__doc__ = \
u"""Return a branch by name (at any level of depth).
Parameters
----------
name : str
name of the branch to return.
Returns
-------
    :py:class:`TBranch <uproot.tree.TBranchMethods>`
selected branch.
Notes
-----
This method can be accessed more directly through square brackets (``__getitem__``) on the :py:class:`TTree <uproot.tree.TTreeMethods>` object.
"""
_method(uproot.tree.TTreeMethods.iterkeys).__doc__ = \
u"""Iterate over branch names.
Parameters
----------
{recursive}
{filtername}
{filtertitle}
Returns
-------
iterator over bytes
names of branches.
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.itervalues).__doc__ = \
u"""Iterate over branches.
Parameters
----------
{recursive}
{filtername}
{filtertitle}
Returns
-------
iterator over :py:class:`TBranch <uproot.tree.TBranchMethods>`
branches.
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.iteritems).__doc__ = \
u"""Iterate over *(branch name, branch)* pairs.
Parameters
----------
{recursive}
{filtername}
{filtertitle}
Returns
-------
iterator over (bytes, :py:class:`TBranch <uproot.tree.TBranchMethods>`)
name-branch pairs.
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.keys).__doc__ = \
u"""Return branch names.
Parameters
----------
{recursive}
{filtername}
{filtertitle}
Returns
-------
list of bytes
names of branches.
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.values).__doc__ = \
u"""Return branches.
Parameters
----------
{recursive}
{filtername}
{filtertitle}
Returns
-------
list of :py:class:`TBranch <uproot.tree.TBranchMethods>`
branches.
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.items).__doc__ = \
u"""Return *(branch name, branch)* pairs.
Parameters
----------
{recursive}
{filtername}
{filtertitle}
Returns
-------
list of (bytes, :py:class:`TBranch <uproot.tree.TBranchMethods>`)
name-branch pairs.
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.allkeys).__doc__ = \
u"""Return branch names at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys <uproot.tree.TTreeMethods.keys>`).
Parameters
----------
{filtername}
{filtertitle}
Returns
-------
list of bytes
names of branches.
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.allvalues).__doc__ = \
u"""Return branches at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values <uproot.tree.TTreeMethods.values>`).
Parameters
----------
{filtername}
{filtertitle}
Returns
-------
list of :py:class:`TBranch <uproot.tree.TBranchMethods>`
branches.
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.allitems).__doc__ = \
u"""Return *(branch name, branch)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items <uproot.tree.TTreeMethods.items>`).
Parameters
----------
{filtername}
{filtertitle}
Returns
-------
    list of (bytes, :py:class:`TBranch <uproot.tree.TBranchMethods>`)
name-branch pairs.
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.clusters).__doc__ = \
u"""Return *(int, int)* pairs representing cluster entry starts and stops in this TTree.
.. todo:: Not implemented.
Returns
-------
list of (int, int)
start (inclusive) and stop (exclusive) pairs for each cluster.
"""
_method(uproot.tree.TTreeMethods.array).__doc__ = \
u"""Read one branch into an array (or other object if provided an alternate *interpretation*).
Parameters
----------
{branch}
{interpretation}
{entrystart}
{entrystop}
{flatten}
{cache}
{basketcache}
{keycache}
{executor}
{blocking}
Returns
-------
array or other object, depending on *interpretation*.
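    Examples
    --------
    A minimal sketch (the branch name is hypothetical)::

        px = tree.array("px")
        head = tree.array("px", entrystart=0, entrystop=100)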
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.lazyarray).__doc__ = \
u"""Create a lazy array that would read the branch as needed.
Parameters
----------
{branch}
{interpretation}
{cache}
{basketcache}
{keycache}
{executor}
Returns
-------
lazy array (square brackets initiate data reading)
lazy version of the array.
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.arrays).__doc__ = \
u"""Read many branches into arrays (or other objects if provided alternate *interpretations*).
Parameters
----------
{branches}
{outputtype}
{entrystart}
{entrystop}
{flatten}
{cache}
{basketcache}
{keycache}
{executor}
{blocking}
Returns
-------
outputtype of arrays or other objects, depending on *interpretation*
branch data.
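    Examples
    --------
    A minimal sketch (branch names are hypothetical)::

        both = tree.arrays(["px", "py"])
        matched = tree.arrays("p[xy]")   # glob-style selection per the "branches" parameter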
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.lazyarrays).__doc__ = \
u"""Create many lazy arrays.
Parameters
----------
{branches}
{outputtype}
{cache}
{basketcache}
{keycache}
{executor}
Returns
-------
outputtype of lazy arrays (square brackets initiate data reading)
lazy branch data.
""".format(**tree_fragments)
_method(uproot.tree.TTreeMethods.iterate).__doc__ = \
u"""Iterate over many arrays at once, yielding the same number of entries from all selected branches in each step.
Depending on the "entrysteps" parameter, the number of entries in one step may differ from the number of entries in the next step, but in every step, the same number of entries is retrieved from all *baskets.*
Parameters
----------
{branches}
{entrysteps}
{outputtype}
{reportentries}
{entrystart}
{entrystop}
{flatten}
{cache}
{basketcache}
{keycache}
{executor}
{blocking}
Returns
-------
iterator over (int, int, outputtype) (if *reportentries*) or just outputtype (otherwise)
aligned array segments from the TTree.
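    Examples
    --------
    A minimal sketch (branch names are hypothetical)::

        for start, stop, arrays in tree.iterate(["px", "py"], reportentries=True):
            pass  # process entries in [start, stop)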
""".format(**tree_fragments)
################################################################ uproot.tree.TBranchMethods
uproot.tree.TBranchMethods.__doc__ = \
u"""Adds array reading methods to TBranch objects that have been streamed from a ROOT file.
- square brackets (``__getitem__``) returns a subbranch by name (see :py:meth:`get <uproot.tree.TBranchMethods.get>`).
- the ``len`` function (``__len__``) returns the number of entries (same as ``numentries``).
- iteration (``__iter__``) has no implementation. This is to avoid confusion between iterating over all subbranches (probably not what you want, but fitting the pattern set by :py:class:`ROOTDirectory <uproot.rootio.ROOTDirectory>` and ``dict``) and iterating over the data.
**Attributes, properties, and methods:**
- **name** (*bytes*) name of the TBranch.
- **title** (*bytes*) title of the TBranch.
- **compression** (:py:class:`Compression <uproot.source.compressed.Compression>`) the compression algorithm and level specified in the TBranch header. (Actual compression used may differ.)
- :py:meth:`get <uproot.tree.TBranchMethods.get>` return a subbranch by name (at any level of depth).
- :py:meth:`iterkeys <uproot.tree.TBranchMethods.iterkeys>` iterate over subbranch names.
- :py:meth:`itervalues <uproot.tree.TBranchMethods.itervalues>` iterate over subbranches.
- :py:meth:`iteritems <uproot.tree.TBranchMethods.iteritems>` iterate over *(subbranch name, subbranch)* pairs.
- :py:meth:`keys <uproot.tree.TBranchMethods.keys>` return subbranch names.
- :py:meth:`values <uproot.tree.TBranchMethods.values>` return subbranches.
- :py:meth:`items <uproot.tree.TBranchMethods.items>` return *(subbranch name, subbranch)* pairs.
- :py:meth:`allkeys <uproot.tree.TBranchMethods.allkeys>` return subbranch names at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys <uproot.tree.TBranchMethods.keys>`).
- :py:meth:`allvalues <uproot.tree.TBranchMethods.allvalues>` return subbranches at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values <uproot.tree.TBranchMethods.values>`).
- :py:meth:`allitems <uproot.tree.TBranchMethods.allitems>` return *(subbranch name, subbranch)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items <uproot.tree.TBranchMethods.items>`).
**Branch information:**
- **numentries** (*int*) number of entries in the TBranch (same as ``len``).
- **numbaskets** (*int*) number of baskets in the TBranch.
- :py:meth:`uncompressedbytes <uproot.tree.TBranchMethods.uncompressedbytes>` the number of bytes contained in the TBranch (data and offsets; not including any key headers) *after* decompression, if applicable.
- :py:meth:`compressedbytes <uproot.tree.TBranchMethods.compressedbytes>` the number of bytes contained in the TBranch (data and offsets; not including any key headers) *before* decompression, if applicable.
- :py:meth:`compressionratio <uproot.tree.TBranchMethods.compressionratio>` the uncompressed bytes divided by compressed bytes (greater than or equal to 1).
- :py:meth:`numitems <uproot.tree.TBranchMethods.numitems>` the number of items in the TBranch, under a given interpretation.
**Basket information:**
- :py:meth:`basket_entrystart <uproot.tree.TBranchMethods.basket_entrystart>` the starting entry for a given basket (inclusive).
- :py:meth:`basket_entrystop <uproot.tree.TBranchMethods.basket_entrystop>` the stopping entry for a given basket (exclusive).
- :py:meth:`basket_numentries <uproot.tree.TBranchMethods.basket_numentries>` the number of entries in a given basket.
- :py:meth:`basket_uncompressedbytes <uproot.tree.TBranchMethods.basket_uncompressedbytes>` the number of bytes contained in the basket (data and offsets; not including any key headers) *after* decompression, if applicable.
- :py:meth:`basket_compressedbytes <uproot.tree.TBranchMethods.basket_compressedbytes>` the number of bytes contained in the basket (data and offsets; not including any key headers) *before* decompression, if applicable.
- :py:meth:`basket_numitems <uproot.tree.TBranchMethods.basket_numitems>` the number of items in the basket, under a given interpretation.
**Methods for reading array data:**
- :py:meth:`array <uproot.tree.TBranchMethods.array>` read the branch into an array (or other object if provided an alternate *interpretation*).
- :py:meth:`lazyarray <uproot.tree.TBranchMethods.lazyarray>` create a lazy array that would read the branch as needed.
- :py:meth:`basket <uproot.tree.TBranchMethods.basket>` read a single basket into an array.
- :py:meth:`baskets <uproot.tree.TBranchMethods.baskets>` read baskets into a list of arrays.
- :py:meth:`iterate_baskets <uproot.tree.TBranchMethods.iterate_baskets>` iterate over baskets.
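    **Example:**

    A sketch of branch-level access (the branch name is hypothetical)::

        branch = tree["px"]
        print(branch.numentries, branch.numbaskets)
        data = branch.array()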
"""
_method(uproot.tree.TBranchMethods.get).__doc__ = \
u"""Return a subbranch by name (at any level of depth).
Parameters
----------
name : str
name of the subbranch to return.
Returns
-------
    :py:class:`TBranch <uproot.tree.TBranchMethods>`
branch object.
Notes
-----
This method can be accessed more directly through square brackets (``__getitem__``) on the :py:class:`TBranch <uproot.tree.TBranchMethods>` object.
"""
_method(uproot.tree.TBranchMethods.iterkeys).__doc__ = \
u"""Iterate over subbranch names.
Parameters
----------
{recursive}
{filtername}
{filtertitle}
Returns
-------
iterator over bytes
subbranch names.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.itervalues).__doc__ = \
u"""Iterate over subbranches.
Parameters
----------
{recursive}
{filtername}
{filtertitle}
Returns
-------
iterator over :py:class:`TBranch <uproot.tree.TBranchMethods>`
subbranches.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.iteritems).__doc__ = \
u"""Iterate over *(subbranch name, subbranch)* pairs.
Parameters
----------
{recursive}
{filtername}
{filtertitle}
Returns
-------
iterator over (bytes, :py:class:`TBranch <uproot.tree.TBranchMethods>`)
*(subbranch name, subbranch)* pairs.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.keys).__doc__ = \
u"""Return subbranch names.
Parameters
----------
{recursive}
{filtername}
{filtertitle}
Returns
-------
list of bytes
subbranch names.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.values).__doc__ = \
u"""Return subbranches.
Parameters
----------
{recursive}
{filtername}
{filtertitle}
Returns
-------
list of :py:class:`TBranch <uproot.tree.TBranchMethods>`
subbranches.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.items).__doc__ = \
u"""Return *(subbranch name, subbranch)* pairs.
Parameters
----------
{recursive}
{filtername}
{filtertitle}
Returns
-------
list of (bytes, :py:class:`TBranch <uproot.tree.TBranchMethods>`)
*(subbranch name, subbranch)* pairs.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.allkeys).__doc__ = \
u"""Return subbranch names at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys <uproot.tree.TBranchMethods.keys>`).
Parameters
----------
{filtername}
{filtertitle}
Returns
-------
list of bytes
subbranch names.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.allvalues).__doc__ = \
u"""Return subbranches at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values <uproot.tree.TBranchMethods.values>`).
Parameters
----------
{filtername}
{filtertitle}
Returns
-------
list of :py:class:`TBranch <uproot.tree.TBranchMethods>`
subbranches.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.allitems).__doc__ = \
u"""Return *(subbranch name, subbranch)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items <uproot.tree.TBranchMethods.items>`).
Parameters
----------
{filtername}
{filtertitle}
Returns
-------
    list of (bytes, :py:class:`TBranch <uproot.tree.TBranchMethods>`)
        *(subbranch name, subbranch)* pairs.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.uncompressedbytes).__doc__ = \
u"""The number of bytes contained in the TBranch (data and offsets; not including any key headers) *after* decompression, if applicable.
Parameters
----------
{keycache}
Returns
-------
int
uncompressed bytes.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.compressedbytes).__doc__ = \
u"""The number of bytes contained in the TBranch (data and offsets; not including any key headers) *before* decompression, if applicable.
Parameters
----------
{keycache}
Returns
-------
int
compressed bytes.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.compressionratio).__doc__ = \
u"""The uncompressed bytes divided by compressed bytes (greater than or equal to 1).
Parameters
----------
{keycache}
Returns
-------
float
compression ratio.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.numitems).__doc__ = \
u"""The number of items in the TBranch, under a given interpretation.
Parameters
----------
{interpretation}
{keycache}
Returns
-------
int
number of items.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.basket_entrystart).__doc__ = \
u"""The starting entry for a given basket (inclusive).
Parameters
----------
{i}
Returns
-------
int
starting entry.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.basket_entrystop).__doc__ = \
u"""The stopping entry for a given basket (exclusive).
Parameters
----------
{i}
Returns
-------
int
stopping entry.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.basket_numentries).__doc__ = \
u"""The number of entries in a given basket.
Parameters
----------
{i}
Returns
-------
int
number of entries.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.basket_uncompressedbytes).__doc__ = \
u"""The number of bytes contained in the basket (data and offsets; not including any key headers) *after* decompression, if applicable.
Parameters
----------
{i}
{keycache}
Returns
-------
int
number of uncompressed bytes.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.basket_compressedbytes).__doc__ = \
u"""The number of bytes contained in the basket (data and offsets; not including any key headers) *before* decompression, if applicable.
Parameters
----------
{i}
{keycache}
Returns
-------
int
number of compressed bytes.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.basket_numitems).__doc__ = \
u"""The number of items in the basket, under a given interpretation.
Parameters
----------
{i}
{interpretation}
{keycache}
Returns
-------
int
number of items.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.array).__doc__ = \
u"""Read the branch into an array (or other object if provided an alternate *interpretation*).
Parameters
----------
{interpretation}
{entrystart}
{entrystop}
{flatten}
{cache}
{basketcache}
{keycache}
{executor}
{blocking}
Returns
-------
array or other object, depending on *interpretation*
branch data.
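    Examples
    --------
    A minimal sketch (the branch name is hypothetical)::

        data = tree["px"].array()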
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.lazyarray).__doc__ = \
u"""Create a lazy array that would read the branch as needed.
Parameters
----------
{interpretation}
{cache}
{basketcache}
{keycache}
{executor}
Returns
-------
lazy array (square brackets initiate data reading)
lazy version of branch data.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.basket).__doc__ = \
u"""Read a single basket into an array.
Parameters
----------
{i}
{interpretation}
{entrystart}
{entrystop}
{flatten}
{cache}
{basketcache}
{keycache}
Returns
-------
array or other object, depending on *interpretation*
basket data.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.baskets).__doc__ = \
u"""Read baskets into a list of arrays.
Parameters
----------
{interpretation}
{entrystart}
{entrystop}
{flatten}
{cache}
{basketcache}
{keycache}
{reportentries}
{executor}
{blocking}
Returns
-------
list of arrays or other objects, depending on *interpretation*
basket data.
""".format(**tree_fragments)
_method(uproot.tree.TBranchMethods.iterate_baskets).__doc__ = \
u"""Iterate over baskets.
Parameters
----------
{interpretation}
{entrystart}
{entrystop}
{flatten}
{cache}
{basketcache}
{keycache}
{reportentries}
Returns
-------
iterator over arrays or other objects, depending on *interpretation*
basket data.
""".format(**tree_fragments)
################################################################ uproot.tree.TTreeMethods.pandas
_method(uproot._connect.to_pandas.TTreeMethods_pandas.df).__doc__ = \
u"""Create a Pandas DataFrame from some branches.
Parameters
----------
{branches}
{entrystart}
{entrystop}
flatten : bool
if ``True`` *(default)*, convert JaggedArrays into flat Numpy arrays and turn the DataFrame index into a two-level MultiIndex to represent the structure.
{cache}
{basketcache}
{keycache}
{executor}
Returns
-------
Pandas DataFrame
data frame (`see docs <http://pandas.pydata.org/pandas-docs/stable/api.html#dataframe>`_).
""".format(**tree_fragments)
################################################################ uproot.interp.interp.Interpretation
uproot.interp.interp.Interpretation.__doc__ = \
u"""Interface for interpretations.
Interpretations do not need to inherit from this class, but they do need to satisfy the interface described below.
Arrays and other collections are filled from ROOT in two stages: raw bytes from each basket are interpreted as a "source" and sources are copied into a branch-wide collection called the "destination" (often swapping bytes from big-endian to native-endian in the process). Public functions return a finalized destination. The distinction between source and destination (a) compactifies disparate baskets into a contiguous collection and (b) allows the output data to differ from the bytes on disk (byte swapping and other conversions).
Interpretations must implement the following methods:
**identifier**
*(property)* a unique identifier for this interpretation, used as part of the cache key so that stale interpretations are not counted as cache hits.
**empty(self)**
return a zero-entry container (for special cases that can skip complex logic by returning an empty set).
**compatible(self, other)**
return ``True`` if and only if ``self`` and ``other`` interpretations would return equivalent results, such as different source interpretations that fill the same destination.
**numitems(self, numbytes, numentries)**
calculate the number of "items" (whatever that means for a given interpretation, but always greater than or equal to the number of entries), knowing only the number of bytes (``numbytes``) and the number of entries (``numentries``).
**source_numitems(self, source)**
calculate the number of "items" given a ``source`` instance.
**fromroot(self, data, offsets, local_entrystart, local_entrystop)**
produce a source from one basket ``data`` array (dtype ``numpy.uint8``) and its corresponding ``offsets`` array (dtype ``numpy.int32`` or ``None`` if not present) that has *n + 1* elements for *n* entries: ``offsets[0] == 0`` and ``offsets[-1] == numentries``. The ``local_entrystart`` and ``local_entrystop`` are entry start (inclusive) and stop (exclusive), in which the first entry in the basket is number zero (hence "local"). The result of this operation may be a zero-copy cast of the basket data.
**destination(self, numitems, numentries)**
create or otherwise produce an unfilled destination object, knowing only the number of items (``numitems``) and number of entries (``numentries``).
**fill(self, source, destination, itemstart, itemstop, entrystart, entrystop)**
copy data from one basket ``source`` (in its entirety) to part of the ``destination`` (usually a small slice). The items range from ``itemstart`` (inclusive) to ``itemstop`` (exclusive) and the entries range from ``entrystart`` (inclusive) to ``entrystop`` (exclusive). This function returns nothing; it is the only function in this interface called for its side-effects (the rest may be pure functions).
**clip(self, destination, itemstart, itemstop, entrystart, entrystop)**
return a slice of the ``destination`` from ``itemstart`` (inclusive) to ``itemstop`` (exclusive) and from ``entrystart`` (inclusive) to ``entrystop`` (exclusive). This is to trim memory allocated but not used, for instance if the entry range does not align with basket boundaries.
**finalize(self, destination)**
possibly post-process a ``destination`` to make it ready for consumption. This is needed if a different form must be used for filling than should be provided to the user--- for instance, offsets of a jagged array can't be computed when filling sections of it in parallel (sizes can), but the user should receive a jagged array based on offsets for random access.
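For example, a minimal interpretation of fixed-width, big-endian 64-bit floats might be sketched as follows (illustrative only; ``asfloat64`` is a made-up name and not part of uproot)::

    import numpy

    class asfloat64(object):
        @property
        def identifier(self):
            return "asfloat64"
        def empty(self):
            return numpy.empty(0, dtype=numpy.float64)
        def compatible(self, other):
            return isinstance(other, asfloat64)
        def numitems(self, numbytes, numentries):
            return numbytes // 8                       # 8 bytes per item
        def source_numitems(self, source):
            return len(source)
        def fromroot(self, data, offsets, local_entrystart, local_entrystop):
            return data.view(">f8")[local_entrystart:local_entrystop]
        def destination(self, numitems, numentries):
            return numpy.empty(numitems, dtype=numpy.float64)
        def fill(self, source, destination, itemstart, itemstop, entrystart, entrystop):
            destination[itemstart:itemstop] = source   # byte-swap happens in the assignment
        def clip(self, destination, itemstart, itemstop, entrystart, entrystop):
            return destination[itemstart:itemstop]
        def finalize(self, destination):
            return destination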
"""
################################################################ uproot.interp.auto.interpret
uproot.interp.auto.interpret.__doc__ = \
u"""Generate a default interpretation of a branch.
This function is called with default options on each branch in the following methods to generate a default interpretation. You can override the default either by calling this function explicitly with different parameters or by modifying its result.
- :py:meth:`TTreeMethods.array <uproot.tree.TTreeMethods.array>`
- :py:meth:`TTreeMethods.lazyarray <uproot.tree.TTreeMethods.lazyarray>`
- :py:meth:`TTreeMethods.arrays <uproot.tree.TTreeMethods.arrays>`
- :py:meth:`TTreeMethods.lazyarrays <uproot.tree.TTreeMethods.lazyarrays>`
- :py:meth:`TTreeMethods.iterate <uproot.tree.TTreeMethods.iterate>`
- :py:meth:`TTreeMethods.iterate_clusters <uproot.tree.TTreeMethods.iterate_clusters>`
- :py:meth:`TBranchMethods.array <uproot.tree.TBranchMethods.array>`
- :py:meth:`TBranchMethods.lazyarray <uproot.tree.TBranchMethods.lazyarray>`
- :py:meth:`TBranchMethods.basket <uproot.tree.TBranchMethods.basket>`
- :py:meth:`TBranchMethods.baskets <uproot.tree.TBranchMethods.baskets>`
- :py:meth:`TBranchMethods.iterate_baskets <uproot.tree.TBranchMethods.iterate_baskets>`
Parameters
----------
branch : :py:class:`TBranchMethods <uproot.tree.TBranchMethods>`
branch to interpret.
classes : ``None`` or ``dict`` of str \u2192 :py:class:`ROOTStreamedObject <uproot.rootio.ROOTStreamedObject>`
class definitions associated with each class name, usually generated by ROOT file streamers. If ``None`` *(default)*, use the class definitions generated from the file from which this branch was read.
swapbytes : bool
if ``True``, generate an interpretation that converts ROOT's big-endian numbers into the machine-native endianness (usually little-endian).
Returns
-------
:py:class:`Interpretation <uproot.interp.interp.Interpretation>`
the interpretation.
"""
################################################################ uproot.interp fragments
interp_fragments = {
# see1
"see1": u"""Part of the :py:class:`Interpretation <uproot.interp.interp.Interpretation>` interface; type ``help(uproot.interp.interp.Interpretation)`` for details.""",
# see2
"see2": u"""Methods implementing the :py:class:`Interpretation <uproot.interp.interp.Interpretation>` interface are not documented here.""",
}
################################################################ uproot.interp.numerical fragments
interp_numerical_fragments = {
# items
"items": u"""In this interpretation, "items" (for ``numitems``, ``itemstart``, ``itemstop``, etc.) has the same meaning as in Numpy: an item is a single scalar value. For example, 100 entries of 2\u00d72 matrices (``todims == (2, 2)``) is 400 items.""",
# fromdtype
"fromdtype": u"""fromdtype : ``numpy.dtype``
the source type; the meaning associated with bytes in the ROOT file. Should be big-endian (e.g. ``">i4"`` for 32-bit integers and ``">f8"`` for 64-bit floats).""",
# fromdims
"fromdims": u"""fromdims : tuple of ints
Numpy shape of each source entry. The Numpy shape of the whole source array is ``(numentries,) + fromdims``. Default is ``()`` (scalar).""",
}
################################################################ uproot.interp.numerical.asdtype
uproot.interp.numerical.asdtype.__doc__ = \
u"""Interpret branch data as a new Numpy array with given dtypes and dimensions.
This interpretation directs branch-reading functions to allocate new Numpy arrays and fill them with the branch contents. See :py:class:`asarray <uproot.interp.numerical.asarray>` to fill an existing array, rather than filling a new array.
{items}
Parameters
----------
{fromdtype}
todtype : ``None`` or ``numpy.dtype``
the destination type; the conversion performed if different from the source type. If ``None`` *(default)*, the destination type will be a native-endian variant of the source type, so that a byte-swap is performed.
{fromdims}
todims : ``None`` or tuple of ints
Numpy shape of each destination entry. The Numpy shape of the whole destination array is ``(numentries,) + todims``. If ``None`` *(default)*, ``todims`` will be equal to ``fromdims``. Making them different allows you to reshape arrays while reading.
Notes
-----
{see2}
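For example, ``uproot.interp.numerical.asdtype(">i8", "<i4")`` would read big-endian 64-bit integers from the file and deliver them as native little-endian 32-bit integers (a sketch; any compatible source/destination dtype pair works the same way).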
""".format(**dict(list(interp_fragments.items()) + list(interp_numerical_fragments.items())))
_method(uproot.interp.numerical.asdtype.to).__doc__ = \
u"""Create a new :py:class:`asdtype <uproot.interp.numerical.asdtype>` interpretation from this one.
Parameters
----------
todtype : ``None`` or ``numpy.dtype``
if not ``None``, change the destination type.
todims : ``None`` or tuple of ints
if not ``None``, change the destination dimensions.
Returns
-------
:py:class:`asdtype <uproot.interp.numerical.asdtype>`
new interpretation.
"""
_method(uproot.interp.numerical.asdtype.toarray).__doc__ = \
u"""Create a :py:class:`asarray <uproot.interp.numerical.asarray>` interpretation from this one.
Parameters
----------
array : ``numpy.ndarray``
the array to fill, instead of allocating a new one.
Returns
-------
:py:class:`asarray <uproot.interp.numerical.asarray>`
new interpretation.
"""
_method(uproot.interp.numerical.asdtype.empty).__doc__ = interp_fragments["see1"]
_method(uproot.interp.numerical.asdtype.compatible).__doc__ = interp_fragments["see1"]
_method(uproot.interp.numerical.asdtype.numitems).__doc__ = interp_fragments["see1"]
_method(uproot.interp.numerical.asdtype.source_numitems).__doc__ = interp_fragments["see1"]
_method(uproot.interp.numerical.asdtype.fromroot).__doc__ = interp_fragments["see1"]
_method(uproot.interp.numerical.asdtype.destination).__doc__ = interp_fragments["see1"]
_method(uproot.interp.numerical.asdtype.fill).__doc__ = interp_fragments["see1"]
_method(uproot.interp.numerical.asdtype.clip).__doc__ = interp_fragments["see1"]
_method(uproot.interp.numerical.asdtype.finalize).__doc__ = interp_fragments["see1"]
################################################################ uproot.interp.numerical.asarray
uproot.interp.numerical.asarray.__doc__ = \
u"""Interpret branch as array data that should overwrite an existing array.
This interpretation directs branch-reading functions to fill the given Numpy array with branch contents. See :py:class:`asdtype <uproot.interp.numerical.asdtype>` to allocate a new array, rather than filling an existing array.
{items}
Parameters
----------
{fromdtype}
toarray : ``numpy.ndarray``
array to be filled; must be at least as large as the branch data.
{fromdims}
Notes
-----
{see2}
This class has *todtype* and *todims* parameters like :py:class:`asdtype <uproot.interp.numerical.asdtype>`, but they are derived from the *toarray* attribute.
""".format(**dict(list(interp_fragments.items()) + list(interp_numerical_fragments.items())))
_method(uproot.interp.numerical.asarray.destination).__doc__ = interp_fragments["see1"]
_method(uproot.interp.numerical.asarray.fill).__doc__ = interp_fragments["see1"]
_method(uproot.interp.numerical.asarray.clip).__doc__ = interp_fragments["see1"]
_method(uproot.interp.numerical.asarray.finalize).__doc__ = interp_fragments["see1"]
################################################################ uproot.interp.jagged.asjagged
uproot.interp.jagged.asjagged.__doc__ = \
u"""Interpret branch as a jagged array (array of non-uniformly sized arrays).
This interpretation directs branch-reading to fill contiguous arrays and present them to the user in a :py:class:`JaggedArray <uproot.interp.jagged.JaggedArray>` interface. Such an object behaves as though it were an array of non-uniformly sized arrays, but it is more memory and cache-line efficient because the underlying data are contiguous.
In this interpretation, "items" (for ``numitems``, ``itemstart``, ``itemstop``, etc.) are the items of the inner array (however that is defined), and "entries" are elements of the outer array. The outer array is always one-dimensional.
Parameters
----------
asdtype : :py:class:`asdtype <uproot.interp.numerical.asdtype>`
interpretation for the inner arrays.
Notes
-----
{see2}
""".format(**interp_fragments)
_method(uproot.interp.jagged.asjagged.to).__doc__ = \
u"""Create a new :py:class:`asjagged <uproot.interp.jagged.asjagged>` interpretation from this one.
Parameters
----------
todtype : ``None`` or ``numpy.dtype``
if not ``None``, change the destination type of inner arrays.
todims : ``None`` or tuple of ints
if not ``None``, change the destination dimensions of inner arrays.
Returns
-------
:py:class:`asjagged <uproot.interp.jagged.asjagged>`
new interpretation.
"""
_method(uproot.interp.jagged.asjagged.empty).__doc__ = interp_fragments["see1"]
_method(uproot.interp.jagged.asjagged.compatible).__doc__ = interp_fragments["see1"]
_method(uproot.interp.jagged.asjagged.numitems).__doc__ = interp_fragments["see1"]
_method(uproot.interp.jagged.asjagged.source_numitems).__doc__ = interp_fragments["see1"]
_method(uproot.interp.jagged.asjagged.fromroot).__doc__ = interp_fragments["see1"]
_method(uproot.interp.jagged.asjagged.destination).__doc__ = interp_fragments["see1"]
_method(uproot.interp.jagged.asjagged.fill).__doc__ = interp_fragments["see1"]
_method(uproot.interp.jagged.asjagged.clip).__doc__ = interp_fragments["see1"]
_method(uproot.interp.jagged.asjagged.finalize).__doc__ = interp_fragments["see1"]
################################################################ uproot.interp.jagged.JaggedArray
uproot.interp.jagged.JaggedArray.__doc__ = \
u"""Array of non-uniformly sized arrays, implemented with contiguous *content* and *offsets*.
Objects of this type can be sliced and indexed as an array of arrays, where each of the interior arrays may have a different length, but it is stored as three contiguous arrays:
- *content*: the interior data without array boundaries;
- *starts*: the starting index of each interior array (inclusive);
- *stops*: the stopping index of each interior array (exclusive).
The *starts* and *stops* may overlap significantly::
starts, stops = offsets[:-1], offsets[1:]
Stored this way, memory usage and fragmentation are minimized, and sequential access is cache-efficient if *starts* is monotonic (the usual case). Providing both a *starts* and a *stops* array allows jagged arrays to be arbitrarily sliced or sorted without copying the *content*.
This class has array-like semantics:
- square brackets (``__getitem__``) returns an inner array if the argument is an integer and a :py:class:`JaggedArray <uproot.interp.jagged.JaggedArray>` if the argument is a slice.
- the ``len`` function (``__len__``) returns the number of inner arrays.
- iteration (``__iter__``) iterates over inner arrays.
Parameters
----------
content : ``numpy.ndarray``
the *content* array, as defined above.
starts : ``numpy.ndarray``
the *starts* array, as defined above. Must be one-dimensional with an integer dtype.
stops : ``numpy.ndarray``
the *stops* array, as defined above. Must be one-dimensional with an integer dtype and the same length as *starts*.
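Examples
--------
A sketch of construction and indexing (values are illustrative)::

    a = JaggedArray.fromlists([[1.1, 2.2, 3.3], [], [4.4, 5.5]])
    a[0]       # the first inner array: 1.1, 2.2, 3.3
    len(a)     # 3 inner arrays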
"""
_method(uproot.interp.jagged.JaggedArray.fromlists).__doc__ = \
u"""Create a :py:class:`JaggedArray <uproot.interp.jagged.JaggedArray>` from Python iterables.
The Numpy types will be inferred from the content.
Parameters
----------
lists : iterable of iterables of numbers
the data to be converted.
Returns
-------
:py:class:`JaggedArray <uproot.interp.jagged.JaggedArray>`
the jagged array.
"""
################################################################ uproot.interp.strings.Strings
uproot.interp.strings.Strings.__doc__ = \
u"""Array of non-uniformly sized strings, implemented with contiguous *content* and *offsets*.
Objects of this type can be sliced and indexed as an array of strings, where each of the strings may have a different length, but it is stored as a :py:class:`JaggedArray <uproot.interp.jagged.JaggedArray>` of ``numpy.uint8``.
Numpy's string-handling options either force fixed-size strings (the ``"S"`` dtype) or non-contiguous Python objects (the ``"O"`` dtype).
This class has array-like semantics:
- square brackets (``__getitem__``) returns a string if the argument is an integer and a :py:class:`Strings <uproot.interp.strings.Strings>` if the argument is a slice.
- the ``len`` function (``__len__``) returns the number of strings.
- iteration (``__iter__``) iterates over strings.
In Python 3, these "strings" are ``bytes`` objects.
Parameters
----------
jaggedarray : :py:class:`JaggedArray <uproot.interp.jagged.JaggedArray>`
a jagged array with one-dimensional ``numpy.uint8`` content.
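Examples
--------
A sketch of typical use::

    s = Strings.fromstrs(["one", "two", "three"])
    s[1]       # b"two"
    len(s)     # 3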
"""
_method(uproot.interp.strings.Strings.fromstrs).__doc__ = \
u"""Create a :py:class:`Strings <uproot.interp.strings.Strings>` from Python strings.
Parameters
----------
strs : iterable of Python strings
strings to convert. If any strings are Python 2 ``unicode`` or Python 3 ``str`` objects, they will be encoded as ``bytes`` with ``"utf-8"`` encoding, ``"replace"`` error semantics.
Returns
-------
:py:class:`Strings <uproot.interp.strings.Strings>`
the contiguous strings.
"""
################################################################ uproot.cache.MemoryCache
uproot.cache.memorycache.MemoryCache.__doc__ = \
u"""A ``dict`` with a least-recently-used (LRU) eviction policy.
This class implements every ``dict`` method and is a subclass of ``dict``, so it may be used anywhere ``dict`` is used. Unlike ``dict``, the least recently used key-value pairs are removed to avoid exceeding a user-specified memory budget. The memory budget may be temporarily exceeded during the process of setting the item. Memory use of the key and value are computed with ``sys.getsizeof`` and the memory use of internal data structures (a ``list``, a ``dict``, and some counters) are included in the memory budget. (The memory used by Numpy arrays is fully accounted for with ``sys.getsizeof``.)
Like ``dict``, this class is not thread-safe.
Like ``dict``, keys may be any hashable type.
**Attributes, properties, and methods:**
- **numbytes** (*int*) the number of bytes currently stored in this cache.
- **numevicted** (*int*) the number of key-value pairs that have been evicted.
- **promote(key)** declare a key to be the most recently used; raises ``KeyError`` if *key* is not in this cache (does not check spillover).
- **spill(key)** copies a key-value pair to the spillover, if any; raises ``KeyError`` if *key* is not in this cache (does not check spillover).
- **spill()** copies all key-value pairs to the spillover, if any.
- **do(key, function)** returns the value associated with *key*, if it exists; if *key* is not yet in the cache, calls the zero-argument *function*, stores its result under *key*, and returns that result.
- all ``dict`` methods, following Python 3 conventions, in which **keys**, **values**, and **items** return iterators, rather than lists.
Parameters
----------
limitbytes : int
the memory budget expressed in bytes. Note that this is a required parameter.
spillover : ``None`` or another ``dict``-like object
another cache to use as a backup for this cache:
- when key-value pairs are evicted from this cache (if ``spill_immediately=False``) or put into this cache (if ``spill_immediately=True``), they are also inserted into the spillover;
- when keys are not found in this cache, the spillover is checked and, if found, the key-value pair is reinstated in this cache;
- when the user explicitly deletes a key-value pair from this cache, it is deleted from the spillover as well.
Usually, the spillover for a :py:class:`MemoryCache <uproot.cache.memorycache.MemoryCache>` is a :py:class:`DiskCache <uproot.cache.diskcache.DiskCache>` so that data that do not fit in memory migrate to disk, or so that the disk can be used as a persistency layer for data that are more quickly accessed in memory.
The same key-value pair might exist in both this cache and in the spillover cache because reinstating a key-value pair from the spillover does not delete it from the spillover. When ``spill_immediately=True``, *every* key-value pair in this cache is also in the spillover cache (assuming the spillover has a larger memory budget).
spill_immediately : bool
if ``False`` *(default)*, key-value pairs are only copied to the spillover cache when they are about to be evicted from this cache (the spillover is a backup); if ``True``, key-value pairs are copied to the spillover cache immediately after they are inserted into this cache (the spillover is a persistency layer).
items : iterable of key-value 2-tuples
ordered pairs to insert into this cache; same meaning as in ``dict`` constructor. Unlike ``dict``, the order of these pairs is relevant: the first item in the list is considered the least recently "used".
**kwds
key-value pairs to insert into this cache; same meaning as in ``dict`` constructor.
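Examples
--------
A sketch of typical use (the budget and keys are illustrative)::

    import numpy
    cache = uproot.cache.memorycache.MemoryCache(limitbytes=1024**2)
    cache["x"] = numpy.arange(1000)
    "x" in cache     # True until evicted by the LRU policy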
"""
################################################################ uproot.cache.ThreadSafeMemoryCache
uproot.cache.memorycache.ThreadSafeMemoryCache.__doc__ = \
u"""A ``dict`` with a least-recently-used (LRU) eviction policy and thread safety.
This class is a thread-safe version of :py:class:`MemoryCache <uproot.cache.memorycache.MemoryCache>`, provided by a global lock. Every method acquires the lock upon entry and releases it upon exit.
See :py:class:`MemoryCache <uproot.cache.memorycache.MemoryCache>` for details on the constructor and methods.
"""
################################################################ uproot.cache.ThreadSafeDict
uproot.cache.memorycache.ThreadSafeDict.__doc__ = \
u"""A ``dict`` with thread safety.
This class is a direct subclass of ``dict`` with a global lock. Every method acquires the lock upon entry and releases it upon exit.
"""
################################################################ uproot.cache.DiskCache
uproot.cache.diskcache.DiskCache.__doc__ = \
u"""A persistent, ``dict``-like object with a least-recently-used (LRU) eviction policy.
This class is not a subclass of ``dict``, but it implements the major features of the ``dict`` interface:
- square brackets get objects from the cache (``__getitem__``), put them in the cache (``__setitem__``), and delete them from the cache (``__delitem__``);
- ``in`` checks for key existence (``__contains__``);
- **keys**, **values**, and **items** return iterators over cache contents.
Unlike ``dict``, the least recently used key-value pairs are removed to avoid exceeding a user-specified memory budget. The memory budget may be temporarily exceeded during the process of setting the item.
Unlike ``dict``, all data are stored in a POSIX filesystem. The only state the in-memory object maintains is a read-only **config**, the **directory** name, and the **read** and **write** functions for deserializing/serializing objects.
Unlike ``dict``, this cache is thread-safe and process-safe--- several processes can read and write to the same cache concurrently, and these threads/processes do not need to be aware of each other (so they can start and stop at will). The first thread/process calls :py:meth:`create <uproot.cache.diskcache.DiskCache.create>` to make a new cache directory and the rest :py:meth:`join <uproot.cache.diskcache.DiskCache.join>` an existing directory. Since the cache is on disk, it can be joined even if all processes are killed and restarted.
Do not use the :py:class:`DiskCache <uproot.cache.diskcache.DiskCache>` constructor: create instances using :py:meth:`create <uproot.cache.diskcache.DiskCache.create>` and :py:meth:`join <uproot.cache.diskcache.DiskCache.join>` *only*.
The cache is configured by a read-only ``config.json`` file and its changing state is tracked with a ``state.json`` file. Key lookup is performed through a shared, memory-mapped ``lookup.npy`` file. When the cache must be locked, it is locked by locking the ``lookup.npy`` file (``fcntl.LOCK_EX``). Read and write operations only lock while hard-linking or renaming files--- bulk reading and writing is performed outside the lock.
The names of the keys and their priority order are encoded in a subdirectory tree, which is updated in such a way that no directory exceeds a maximum number of subdirectories and the least and most recently used keys can be identified without traversing all of the keys.
The ``lookup.npy`` file is a binary-valued hashmap. If two keys hash to the same value, collisions are resolved via JSON files. Collisions are very expensive and should be avoided by providing enough slots in the ``lookup.npy`` file.
Unlike ``dict``, keys must be strings.
**Attributes, properties, and methods:**
- **numbytes** (*int*) the number of bytes currently stored in the cache.
- **config.limitbytes** (*int*) the memory budget expressed in bytes.
- **config.lookupsize** (*int*) the number of slots in the hashmap ``lookup.npy`` (increase this to reduce collisions).
- **config.maxperdir** (*int*) the maximum number of subdirectories per directory.
- **config.delimiter** (*str*) used to separate order prefix from keys.
- **config.numformat** (*str*) Numpy dtype of the ``lookup.npy`` file (as a string).
- **state.numbytes** (*int*) see **numbytes** above.
- **state.depth** (*int*) current depth of the subdirectory tree.
- **state.next** (*int*) next integer in the priority queue.
- **refresh_config()** re-reads **config** from ``config.json``.
- **promote(key)** declare a key to be the most recently used; raises ``KeyError`` if *key* is not in the cache.
- **keys()** locks the cache and returns an iterator over keys; cache is unlocked only when iteration finishes (so evaluate this quickly to avoid blocking the cache for all processes).
- **values()** locks the cache and returns an iterator over values; same locking warning.
- **items()** locks the cache and returns an iterator over key-value pairs; same locking warning.
- **destroy()** deletes the directory--- all subsequent actions are undefined.
- **do(key, function)** returns the value associated with *key*, if it exists; if *key* is not yet in the cache, calls the zero-argument *function*, stores its result under *key*, and returns that result.
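A sketch of typical use (the path and budget are illustrative; ``arrayread`` and ``arraywrite`` are the sample serializers documented below)::

    cache = uproot.cache.diskcache.DiskCache.create(
        1024**3, "/tmp/mycache",
        read=uproot.cache.diskcache.arrayread,
        write=uproot.cache.diskcache.arraywrite)

    # any other process may attach to the same directory:
    cache2 = uproot.cache.diskcache.DiskCache.join(
        "/tmp/mycache",
        read=uproot.cache.diskcache.arrayread,
        write=uproot.cache.diskcache.arraywrite)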
"""
cache_diskcache_fragments = {
# read
"read": u"""read : function (filename, cleanup) \u21d2 data
deserialization function, used by "get" to turn files into Python objects (such as arrays). This function must call ``cleanup()`` when reading is complete, regardless of whether an exception occurs.""",
# write
"write": u"""write : function (filename, data) \u21d2 ``None``
serialization function, used by "put" to turn Python objects (such as arrays) into files. The return value of this function is ignored.""",
}
_method(uproot.cache.diskcache.DiskCache.create).__doc__ = \
u"""Create a new disk cache.
Parameters
----------
limitbytes : int
the memory budget expressed in bytes.
directory : str
local path to the directory to create as a disk cache. If a file or directory exists at that location, it will be overwritten.
{read}
{write}
lookupsize : int
the number of slots in the hashmap ``lookup.npy`` (increase this to reduce collisions).
maxperdir : int
the maximum number of subdirectories per directory.
delimiter : str
used to separate order prefix from keys.
numformat : ``numpy.dtype``
type of the ``lookup.npy`` file.
Returns
-------
:py:class:`DiskCache <uproot.cache.diskcache.DiskCache>`
first view into the disk cache.
""".format(**cache_diskcache_fragments)
_method(uproot.cache.diskcache.DiskCache.join).__doc__ = \
u"""Instantate a view into an existing disk cache.
Parameters
----------
directory : str
local path to the directory to view as a disk cache.
{read}
{write}
check : bool
if ``True`` *(default)*, verify that the structure of the directory is a properly formatted disk cache, raising ``ValueError`` if it isn't.
Returns
-------
:py:class:`DiskCache <uproot.cache.diskcache.DiskCache>`
view into the disk cache.
""".format(**cache_diskcache_fragments)
################################################################ uproot.cache.diskcache.arrayread
uproot.cache.diskcache.arrayread.__doc__ = \
u"""Sample deserialization function; reads Numpy files (``*.npy``) into Numpy arrays.
To be used as an argument to :py:meth:`create <uproot.cache.diskcache.DiskCache.create>` or :py:meth:`join <uproot.cache.diskcache.DiskCache.join>`.
Parameters
----------
filename : str
local path to read.
cleanup : function () \u21d2 ``None``
cleanup function to call after reading is complete.
Returns
-------
``numpy.ndarray``
Numpy array.
"""
################################################################ uproot.cache.diskcache.arraywrite
uproot.cache.diskcache.arraywrite.__doc__ = \
u"""Sample serialization function; writes Numpy arrays into Numpy files (``*.npy``).
To be used as an argument to :py:meth:`create <uproot.cache.diskcache.DiskCache.create>` or :py:meth:`join <uproot.cache.diskcache.DiskCache.join>`.
Parameters
----------
filename : str
local path to overwrite.
obj : ``numpy.ndarray``
array to write.
"""
################################################################ uproot.cache.diskcache.memmapread
uproot.cache.diskcache.memmapread.__doc__ = \
u"""Lazy deserialization function; reads Numpy files (``*.npy``) as a memory-map.
To be used as an argument to :py:meth:`create <uproot.cache.diskcache.DiskCache.create>` or :py:meth:`join <uproot.cache.diskcache.DiskCache.join>`.
Parameters
----------
filename : str
local path to read.
cleanup : function () \u21d2 ``None``
cleanup function to call after reading is complete.
Returns
-------
wrapped ``numpy.core.memmap``
cleanup function is called when this object is destroyed (``__del__``).
"""
################################################################ uproot.source.cursor.Cursor
uproot.source.cursor.Cursor.__doc__ = \
u"""Maintain a position in a :py:class:`Source <uproot.source.source.Source>` that updates as data are read.
**Attributes, properties, and methods:**
- **index** (*int*) the position.
- **origin** (*int*) "beginning of buffer" position, used in the **refs** key in :py:func:`uproot.rootio._readobjany <uproot.rootio._readobjany>`.
- **refs** (``None`` or ``dict``-like) manages cross-references in :py:func:`uproot.rootio._readobjany <uproot.rootio._readobjany>`.
- :py:meth:`copied <uproot.source.cursor.Cursor.copied>` return a copy of this :py:class:`Cursor <uproot.source.cursor.Cursor>` with modifications.
- :py:meth:`skipped <uproot.source.cursor.Cursor.skipped>` return a copy of this :py:class:`Cursor <uproot.source.cursor.Cursor>` with the **index** moved forward.
- :py:meth:`skip <uproot.source.cursor.Cursor.skip>` move the **index** of this :py:class:`Cursor <uproot.source.cursor.Cursor>` forward.
- :py:meth:`fields <uproot.source.cursor.Cursor.fields>` interpret bytes in the :py:class:`Source <uproot.source.source.Source>` with given data types and skip the **index** past them.
- :py:meth:`field <uproot.source.cursor.Cursor.field>` interpret bytes in the :py:class:`Source <uproot.source.source.Source>` with a given data type and skip the **index** past it.
- :py:meth:`bytes <uproot.source.cursor.Cursor.bytes>` return a range of bytes from the :py:class:`Source <uproot.source.source.Source>` and skip the **index** past it.
- :py:meth:`array <uproot.source.cursor.Cursor.array>` return a range of bytes from the :py:class:`Source <uproot.source.source.Source>` as a typed Numpy array and skip the **index** past it.
- :py:meth:`string <uproot.source.cursor.Cursor.string>` read a string from the :py:class:`Source <uproot.source.source.Source>`, interpreting the first 1 or 5 bytes as a size and skip the **index** past it.
- :py:meth:`cstring <uproot.source.cursor.Cursor.cstring>` read a null-terminated string from the :py:class:`Source <uproot.source.source.Source>` and skip the **index** past it.
- :py:meth:`skipstring <uproot.source.cursor.Cursor.skipstring>` interpret the first 1 or 5 bytes as a size and skip the **index** past the string (without creating a Python string).
- :py:meth:`hexdump <uproot.source.cursor.Cursor.hexdump>` view a section of the :py:class:`Source <uproot.source.source.Source>` as formatted by the POSIX ``hexdump`` program and *do not* move the **index**.
Parameters
----------
index : int
the initial **index**.
origin : int
the **origin**, *(default is 0)*.
refs : ``None`` or ``dict``-like
if ``None`` *(default)*, use a new :py:class:`ThreadSafeDict <uproot.cache.memorycache.ThreadSafeDict>` as the **refs**; otherwise, use the value provided.
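For example, a sketch of sequential reading (``source`` is any :py:class:`Source <uproot.source.source.Source>`; the field layout is illustrative)::

    import struct
    cursor = Cursor(0)
    magic = cursor.bytes(source, 4)                       # index moves to 4
    version = cursor.field(source, struct.Struct(">h"))   # index moves to 6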
"""
format_source_cursor = {
# source
"source": u"""source : :py:class:`Source <uproot.source.source.Source>`
data to be read."""
}
_method(uproot.source.cursor.Cursor.copied).__doc__ = \
u"""Return a copy of this :py:class:`Cursor <uproot.source.cursor.Cursor>` with modifications.
Parameters
----------
index : ``None`` or int
if not ``None`` *(default)*, use this as the new index position.
origin : ``None`` or int
if not ``None`` *(default)*, use this as the new origin.
refs : ``None`` or ``dict``-like
if not ``None`` *(default)*, use this as the new refs.
Returns
-------
:py:class:`Cursor <uproot.source.cursor.Cursor>`
the new cursor.
Notes
-----
This is a shallow copy--- the **refs** are shared with the parent and all other copies.
""".format(**format_source_cursor)
_method(uproot.source.cursor.Cursor.skipped).__doc__ = \
u"""Return a copy of this :py:class:`Cursor <uproot.source.cursor.Cursor>` with the **index** moved forward.
Parameters
----------
numbytes : int
number of bytes to be skipped in the copy, leaving the original unchanged.
origin : ``None`` or int
if not ``None`` *(default)*, use this as the new origin.
refs : ``None`` or ``dict``-like
if not ``None`` *(default)*, use this as the new refs.
Returns
-------
:py:class:`Cursor <uproot.source.cursor.Cursor>`
the new cursor.
Notes
-----
This is a shallow copy--- the **refs** are shared with the parent and all other copies.
""".format(**format_source_cursor)
_method(uproot.source.cursor.Cursor.skip).__doc__ = \
u"""Move the **index** of this :py:class:`Cursor <uproot.source.cursor.Cursor>` forward.
Parameters
----------
numbytes : int
number of bytes to skip
""".format(**format_source_cursor)
_method(uproot.source.cursor.Cursor.fields).__doc__ = \
u"""Interpret bytes in the :py:class:`Source <uproot.source.source.Source>` with given data types and skip the **index** past them.
Parameters
----------
{source}
format : ``struct.Struct``
compiled parser from Python's ``struct`` library.
Returns
-------
tuple
field values (types determined by format)
""".format(**format_source_cursor)
_method(uproot.source.cursor.Cursor.field).__doc__ = \
u"""Interpret bytes in the :py:class:`Source <uproot.source.source.Source>` with a given data type and skip the **index** past it.
Parameters
----------
{source}
format : ``struct.Struct``
compiled parser from Python's ``struct`` library; must return only one field.
Returns
-------
type determined by format
field value
""".format(**format_source_cursor)
_method(uproot.source.cursor.Cursor.bytes).__doc__ = \
u"""Return a range of bytes from the :py:class:`Source <uproot.source.source.Source>` and skip the **index** past it.
Parameters
----------
{source}
length : int
number of bytes.
Returns
-------
``numpy.ndarray`` of ``numpy.uint8``
raw view of data from source.
""".format(**format_source_cursor)
_method(uproot.source.cursor.Cursor.array).__doc__ = \
u"""Return a range of bytes from the :py:class:`Source <uproot.source.source.Source>` as a typed Numpy array and skip the **index** past it.
Parameters
----------
{source}
length : int
number of items.
dtype : ``numpy.dtype``
type of the array.
Returns
-------
``numpy.ndarray``
interpreted view of data from source.
""".format(**format_source_cursor)
_method(uproot.source.cursor.Cursor.string).__doc__ = \
u"""Read a string from the :py:class:`Source <uproot.source.source.Source>`, interpreting the first 1 or 5 bytes as a size and skip the **index** past it.
Parameters
----------
{source}
Returns
-------
bytes
Python string (``bytes`` in Python 3).
""".format(**format_source_cursor)
_method(uproot.source.cursor.Cursor.cstring).__doc__ = \
u"""Read a null-terminated string from the :py:class:`Source <uproot.source.source.Source>` and skip the **index** past it.
The index is also skipped past the null that terminates the string.
Parameters
----------
{source}
Returns
-------
bytes
Python string (``bytes`` in Python 3).
""".format(**format_source_cursor)
_method(uproot.source.cursor.Cursor.skipstring).__doc__ = \
u"""Interpret the first 1 or 5 bytes as a size and skip the **index** past the string (without creating a Python string).
Parameters
----------
{source}
""".format(**format_source_cursor)
_method(uproot.source.cursor.Cursor.hexdump).__doc__ = \
u"""View a section of the :py:class:`Source <uproot.source.source.Source>` as formatted by the POSIX ``hexdump`` program and *do not* move the **index**.
This is much more useful than simply hexdumping the whole file, since partial interpretation is necessary to find the right point in the file to dump.
Parameters
----------
{source}
size : int
number of bytes to view; default is 160 (10 lines).
offset : int
where to start the view, relative to index; default is 0 (at index).
format : str
Python's printf-style format string for individual bytes; default is "%02x" (zero-prefixed, two-character hexadecimal).
Returns
-------
str
hexdump-formatted view to be printed
""".format(**format_source_cursor)
################################################################ uproot.source.source.Source
uproot.source.source.Source.__doc__ = \
u"""Interface for data sources.
Sources do not need to inherit from this class, but they do need to satisfy the interface described below.
**parent(self)**
return the :py:class:`Source <uproot.source.source.Source>` from which this was copied; may be ``None``.
**threadlocal(self)**
either return ``self`` (if thread-safe) or return a thread-safe copy, such as a new file handle into the same file.
**dismiss(self)**
thread-local copies are no longer needed; they may be eliminated if redundant.
**data(self, start, stop, dtype=None)**
return a view of data from the starting byte (inclusive) to the stopping byte (exclusive), with a given Numpy type (numpy.uint8 if ``None``).
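For example, a trivial in-memory source might be sketched as (illustrative only; ``ArraySource`` is a made-up name and not part of uproot)::

    import numpy

    class ArraySource(object):
        def __init__(self, rawbytes):
            self._data = numpy.frombuffer(rawbytes, dtype=numpy.uint8)
        def parent(self):
            return None
        def threadlocal(self):
            return self                     # read-only memory is already thread-safe
        def dismiss(self):
            pass
        def data(self, start, stop, dtype=None):
            out = self._data[start:stop]
            return out if dtype is None else out.view(dtype)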
"""
source_fragments = {
# see1
"see1": u"""Part of the :py:class:`Source <uproot.source.source.Source>` interface; type ``help(uproot.source.source.Source)`` for details.""",
# see2
"see2": u"""Methods implementing the :py:class:`Source <uproot.source.source.Source>` interface are not documented here.""",
}
################################################################ uproot.source.file.FileSource
uproot.source.file.FileSource.defaults.__doc__ = \
u"""Provide sensible defaults for a :py:class:`FileSource <uproot.source.file.FileSource>`.
The default parameters are:
- **chunkbytes:** 8*1024 (8 kB per chunk, the minimum that pages into memory if you try to read one byte on a typical Linux system).
- **limitbytes:** 1024**2 (1 MB), a very modest amount of RAM.
Parameters
----------
path : str
local file path of the input file (it must not be moved during reading!).
Returns
-------
:py:class:`FileSource <uproot.source.file.FileSource>`
a new file source.
"""
uproot.source.file.FileSource.__doc__ = \
u"""Emulate a memory-mapped interface with traditional file handles, opening many if necessary.
:py:class:`FileSource <uproot.source.file.FileSource>` objects avoid double-reading and many small reads by caching data in chunks. All thread-local copies of a :py:class:`FileSource <uproot.source.file.FileSource>` share a :py:class:`ThreadSafeMemoryCache <uproot.cache.memorycache.ThreadSafeMemoryCache>` to avoid double-reads across threads.
Parameters
----------
path : str
local file path of the input file (it must not be moved during reading!).
chunkbytes : int
number of bytes per chunk.
limitbytes : int
maximum number of bytes to keep in the cache.
Notes
-----
{see2}
""".format(**source_fragments)
_method(uproot.source.file.FileSource.parent).__doc__ = source_fragments["see1"]
_method(uproot.source.file.FileSource.threadlocal).__doc__ = source_fragments["see1"]
_method(uproot.source.file.FileSource.dismiss).__doc__ = source_fragments["see1"]
_method(uproot.source.file.FileSource.data).__doc__ = source_fragments["see1"]
################################################################ uproot.source.memmap.MemmapSource
uproot.source.memmap.MemmapSource.defaults.__doc__ = \
u"""Provide sensible defaults for a :py:class:`MemmapSource <uproot.source.memmap.MemmapSource>`.
This is a dummy function, as :py:class:`MemmapSource <uproot.source.memmap.MemmapSource>` is not parameterizable. It exists to satisfy code symmetry.
Parameters
----------
path : str
local file path of the input file.
Returns
-------
:py:class:`MemmapSource <uproot.source.memmap.MemmapSource>`
a new memory-mapped source.
"""
uproot.source.memmap.MemmapSource.__doc__ = \
u"""Thin wrapper around a memory-mapped file, which already behaves like a :py:class:`Source <uproot.source.source.Source>`.
Parameters
----------
path : str
local file path of the input file.
Notes
-----
{see2}
""".format(**source_fragments)
_method(uproot.source.memmap.MemmapSource.parent).__doc__ = source_fragments["see1"]
_method(uproot.source.memmap.MemmapSource.threadlocal).__doc__ = source_fragments["see1"]
_method(uproot.source.memmap.MemmapSource.dismiss).__doc__ = source_fragments["see1"]
_method(uproot.source.memmap.MemmapSource.data).__doc__ = source_fragments["see1"]
################################################################ uproot.source.xrootd.XRootDSource
uproot.source.xrootd.XRootDSource.defaults.__doc__ = \
u"""Provide sensible defaults for a :py:class:`XRootDSource <uproot.source.xrootd.XRootDSource>`.
The default parameters are:
- **chunkbytes:** 8*1024 (8 kB per chunk).
- **limitbytes:** 1024**2 (1 MB), a very modest amount of RAM.
Parameters
----------
path : str
remote file URL.
Returns
-------
:py:class:`XRootDSource <uproot.source.xrootd.XRootDSource>`
a new XRootD source.
"""
uproot.source.xrootd.XRootDSource.__doc__ = \
u"""Emulate a memory-mapped interface with XRootD.
XRootD is already thread-safe, but provides no caching. :py:class:`XRootDSource <uproot.source.xrootd.XRootDSource>` objects avoid double-reading and many small reads by caching data in chunks. They are not duplicated when splitting into threads.
Parameters
----------
path : str
remote file URL.
chunkbytes : int
number of bytes per chunk.
limitbytes : int
maximum number of bytes to keep in the cache.
Notes
-----
{see2}
""".format(**source_fragments)
_method(uproot.source.xrootd.XRootDSource.parent).__doc__ = source_fragments["see1"]
_method(uproot.source.xrootd.XRootDSource.threadlocal).__doc__ = source_fragments["see1"]
_method(uproot.source.xrootd.XRootDSource.dismiss).__doc__ = source_fragments["see1"]
_method(uproot.source.xrootd.XRootDSource.data).__doc__ = source_fragments["see1"]
################################################################ uproot.source.compressed.Compression
uproot.source.compressed.Compression.__doc__ = \
u"""Describe the compression of a compressed block.
**Attributes, properties, and methods:**
- **algo** (*int*) algorithm code.
- **level** (*int*) 0 is no compression, 1 is least, 9 is most.
- **algoname** (*str*) algorithm expressed as a string: ``"zlib"``, ``"lzma"``, ``"old"``, or ``"lz4"``.
- **copy(algo=None, level=None)** copy this :py:class:`Compression <uproot.source.compressed.Compression>` object, possibly changing a field.
- **decompress(source, cursor, compressedbytes, uncompressedbytes)** decompress data from **source** at **cursor**, knowing the compressed and uncompressed size.
Parameters
----------
fCompress : int
ROOT fCompress field.
"""
################################################################ uproot.source.compressed.CompressedSource
uproot.source.compressed.CompressedSource.__doc__ = \
u"""A :py:class:`Source <uproot.source.source.Source>` for compressed data.
Decompresses on demand--- without caching the result--- so cache options in higher-level array functions are very important.
Ordinary users would never create a :py:class:`CompressedSource <uproot.source.compressed.CompressedSource>`. They are produced when a TKey encounters a compressed value.
Parameters
----------
compression : :py:class:`Compression <uproot.source.compressed.Compression>`
inherited description of the compression. Note that *this is overridden* by the first two bytes of the compressed block, which can disagree with the higher-level description and take precedence.
source : :py:class:`Source <uproot.source.source.Source>`
the source in which compressed data may be found.
cursor : :py:class:`Cursor <uproot.source.cursor.Cursor>`
location in the source.
compressedbytes : int
number of bytes after compression.
uncompressedbytes : int
number of bytes before compression.
"""
| [
"[email protected]"
]
| |
f78310a73f5328335184fda438b6808995b9f2c9 | b0e66db67b34b88e7884aa9b4a7b7607bbe9651b | /math/d20/const.py | ae16e3cdb42ae989d9829f7ffef9815a604cce32 | []
| no_license | cole-brown/veredi-code | 15cf47c688c909b27ad2f2f3518df72862bd17bc | 8c9fc1170ceac335985686571568eebf08b0db7a | refs/heads/master | 2023-04-22T03:21:10.506392 | 2021-05-01T19:05:10 | 2021-05-01T19:05:10 | 296,949,870 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,009 | py | # coding: utf-8
'''
Constants, Enums, and Stuff for math.d20 module.
'''
# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
import enum
# -----------------------------------------------------------------------------
# Constants
# -----------------------------------------------------------------------------
@enum.unique
class FormatOptions(enum.Flag):
NONE = 0
INITIAL = enum.auto()
INTERMEDIATE = enum.auto()
FINAL = enum.auto()
ALL = INITIAL | INTERMEDIATE | FINAL
def all(self, flag):
return ((self & flag) == flag)
def any(self, *flags):
for each in flags:
if (self & each) == each:
return True
return False
# -----------------------------------------------------------------------------
# Code
# -----------------------------------------------------------------------------
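# A quick usage sketch (illustrative only): combining flags and testing
# membership with the helper methods defined above.
if __name__ == '__main__':
    opts = FormatOptions.INITIAL | FormatOptions.FINAL
    print(opts.all(FormatOptions.INITIAL))  # True: INITIAL is fully contained
    print(opts.all(FormatOptions.ALL))      # False: INTERMEDIATE is missing
    print(opts.any(FormatOptions.INTERMEDIATE, FormatOptions.FINAL))  # True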
| [
"[email protected]"
]
| |
f8628d4fb035631907212605035a7ea9aa7a7f62 | 8a83bb7acb9b62183fca817e1f196dd8075630a4 | /01_array/23_majority_number.py | ec356909a26e7fbefe85d7ba6c55c3904da6c149 | []
| no_license | sandeepkumar8713/pythonapps | ff5ad3da854aa58e60f2c14d27359f8b838cac57 | 5dcb5ad4873124fed2ec3a717bfa379a4bbd197d | refs/heads/main | 2023-09-01T04:12:03.865755 | 2023-08-31T07:04:58 | 2023-08-31T07:04:58 | 234,762,925 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,565 | py | # CTCI : Q17_10_Majority_Element
# https://www.geeksforgeeks.org/majority-element/
# Question : Write a function which takes an array and prints the majority element (if it exists),
# otherwise prints "No Majority Element". A majority element in an array A[] of size n is an
# element that appears more than n/2 times (and hence there is at most one such element).
#
# Question Type : Easy
# Used : findCandidate():
# maj_index = 0, count = 1
# Run a loop of the input array.
# If A[maj_index] == A[i]: count++ else count--
#        If count == 0: (take the current element as the new candidate) maj_index = i, count = 1
# return A[maj_index]
#        The value returned by findCandidate() is only a candidate for the majority number.
#        Run a second loop over the elements to verify that the candidate really is the majority.
# Complexity : O(n)
def findCandidate(A):
maj_index = 0
count = 1
for i in range(len(A)):
if A[maj_index] == A[i]:
count += 1
else:
count -= 1
if count == 0:
maj_index = i
count = 1
return A[maj_index]
def isMajority(A, cand):
count = 0
for i in range(len(A)):
if A[i] == cand:
count += 1
if count > len(A) / 2:
return True
else:
return False
def printMajority(A):
cand = findCandidate(A)
if isMajority(A, cand):
print(cand)
else:
print("No Majority Element")
if __name__ == "__main__":
A = [1, 3, 3, 1, 2, 3, 3]
printMajority(A)
A = [1, 3, 3, 1, 2, 3]
printMajority(A)
| [
"[email protected]"
]
| |
7ba891f64ca2ff2a94dfa0b697e25d988e8912f0 | cf5b54b2c84437d9e72575589812b88921d4430c | /server/main_web.py | db2c8ed2232e14bbc1f855ffda8417b9bbde11df | []
| no_license | hw233/gameserver3 | 90fc3b8b8e8f809b7c3197fc97145fb264166b93 | d86ef3b3313ef51df5ba01bc700877827b0a81cd | refs/heads/master | 2022-01-13T07:40:38.211070 | 2019-05-11T02:15:14 | 2019-05-11T02:15:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 36,452 | py | # -*- coding: utf-8 -*-
__author__ = 'Administrator'
import json,sys,redis,time,os,os.path,decimal
import shutil
import hashlib
import zipfile
import logging
import urlparse
import subprocess
from flask import Flask,request,render_template,redirect, url_for,jsonify,make_response,abort,flash,get_flashed_messages
from flask import session as flask_session
from werkzeug import secure_filename
from conf import DevConfig
from db.connect import *
from db.order import *
from db.mail import *
from db.bag_item import *
from db.charge_item import *
from db.mail import *
from db.reward_user_log import *
from db.customer_service_log import *
from db.user import *
from db.pop_activity import *
from db.pop_activity_user import *
from db.lucky import *
from db.avatar_verify import *
from config.var import *
from config.rank import *
from helper import datehelper
from sqlalchemy import and_
from sqlalchemy.sql import desc
# from web.upload import *
# from web.avatar import *
# from config.var import *
# from config.vip import *
from hall.hallobject import *
from task.achievementtask import *
from rank.chargetop import *
from hall.rank import *
from dal.core import *
from hall.charge_order import *
from hall.messagemanager import *
from hall.customerservice import *
from hall.flow import *
from hall.mail import *
from activity.luckywheel import *
from helper import wordhelper
from util.commonutil import set_context,get_context
from order.orderhandler import receive_order_callback,create_charge_order
reload(sys)
sys.setdefaultencoding('utf-8')
app = Flask(__name__, static_path='',static_folder='',template_folder='web/templates')
app.config.from_object(DevConfig)
app.secret_key = 'Wgc!@##@!2017'
session = Session()
r = redis.Redis(**WEB_REDIS)
da = DataAccess(r)
STATIC_PATH = 'web/static/'
# uploader = Uploader(STATIC_PATH)
vip = VIPObject(None)
customer_service = CustomerService(r)
# CPID:jjcq20170223_141_312
# CPKEY:d5ff856beccf4c472831c3f16c376e28
# CP_KEY = 'd5ff856beccf4c472831c3f16c376e28'
# VIP_UP=u'玩家%s一掷千金,成功升级为%s,成为人生赢家!(VIP1+)'  # broadcast template: "Player %s spent a fortune and upgraded to %s, a true winner in life! (VIP1+)"
UPLOAD_FOLDER = 'web/static/avatar'
UPGRADE_FOLDER = 'web/static/upgrade'
ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif','zip'])
REAL_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'web/static/avatar')
UPGRADE_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'web/static/upgrade')
BACK_UP = 'backup'
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
@app.route('/', methods=['GET', 'POST'])
def demo():
return 'hello world!123123'
@app.route('/flow')
def flow():
code = int(request.args.get('code'))
msg = request.args.get('msg')
req_id = request.args.get('req_id')
Flow.callback_flow(session, req_id, code, msg, da, r)
return jsonify({'code':1,'text':'','error':{}})
@app.route('/activity')
def activity():
return render_template('activity.html',GO_BACK=ACTIVITY_BACK_URL)
@app.route('/activity/back')
def activity_back():
MessageManager.push_h5_back(r, int(request.args.get('uid')), {'data':{}})
return jsonify({'code':0,'msg':'success','ext':{}})
@app.route('/activity/create_order')
def activity_create_order():
result = create_charge_order(session, r, da, int(request.args.get('uid')), int(request.args.get('shop_id')), request.args.get('comment'))
if result:
MessageManager.push_h5_create_order(r, int(request.args.get('uid')), {'data':{
'order':{
'order_sn':result['order_sn'],
'money':result['money'],
'callback':result['callback'],
'name':result['name']
}
}})
return jsonify({'code':0,'msg':'success','ext':{
'order_sn':result['order_sn'],
'money':result['money'],
'callback':result['callback'],
}})
return jsonify({'code':-1,'msg':'error','ext':{}})
@app.route('/wheel')
def wheel():
uid = request.args.get('uid')
    # request args arrive as strings; the old `uid <= 0` check never matched a string in Python 2
    if uid is None or not uid.isdigit() or int(uid) <= 0:
        abort(404)  # a bare `return 404` is not a valid Flask response
activity = ActivityManager(session, da, r)
activity_user = activity.load_activity_user(uid)
data = {}
if activity_user == None:
data['play_count'] = 0
else:
user_params = activity.prase_activity_params(activity_user.params)
data['play_count'] = user_params['wheel']['play_count']
data['wheel_reward'] = json.loads(r.get('activity_wheel_info'))
progress_list = r.lrange('activity_wheel_code',0,-1)
data['progress'] = int(len(progress_list) / float(data['wheel_reward']['wheel_len']) * 100)
data['my_progress'] = len([1 for x in progress_list if x.split('_')[0] == uid])
data['ACTIVITY_CREATE_URL'] = ACTIVITY_CREATE_URL
data['ACTIVITY_PLAY'] = ACTIVITY_PLAY
    # 2. load the latest round's treasure data
return render_template('wheel.html',data=data)
@app.route('/wheel/play')
def wheel_play():
if get_context('session') == None:
set_context('session', session)
am = ActivityManager(session, da, r)
uid = request.args.get('uid')
flag, result = am.get_handle('wheel',uid)
if flag:
activity_game = result
if activity_game.can_play():
activity_game.handle()
am.save_game_result(activity_game)
data = {}
data['play_count'] = activity_game.params['wheel']['play_count']
data['lucky_val'] = activity_game.result[1]
data['lucky_code'] = activity_game.result[3]
data['lucky_key'] = activity_game.result[4]
data['index'] = activity_game.result[2]
data['wheel_reward'] = json.loads(r.get('activity_wheel_info'))
progress_list = r.lrange('activity_wheel_code',0,-1)
data['progress'] = int(len(progress_list) / float(data['wheel_reward']['wheel_len']) * 100)
data['progress'] = data['progress'] if data['progress'] > 1 else 1
data['my_progress'] = len([1 for x in progress_list if x.split('_')[0] == uid])
return jsonify({'code':0,'msg':'success','ext':data})
else:
            return jsonify({'code':-1, 'msg':u'当前可用次数为0,请充值','ext':{}})  # msg: "no plays left, please recharge"
else:
return jsonify(result)
@app.route('/wheel/history')
def wheel_history():
if request.args.get('uid'):
lists = []
logs = session.query(TActivityWheelLog,TUser)\
.join(TUser,TActivityWheelLog.uid == TUser.id)\
.filter(TActivityWheelLog.uid == request.args.get('uid'))\
.order_by(TActivityWheelLog.create_time.desc())\
.limit(20)\
.all()
for log in logs:
lists.append({'title':log[0].reward_item,'create_time':log[0].create_time.strftime('%Y-%m-%d %H:%M:%S'),'nick':log[1].nick,'round':log[0].round})
# return render_template('wheel_history.html', lists=lists, uid=request.args.get('uid'))
return jsonify({
'code':0,
'msg':'success',
'ext':{'lists':lists}
})
logs = session.query(TActivityWheelLog,TUser)\
.join(TUser,TActivityWheelLog.uid == TUser.id)\
.order_by(TActivityWheelLog.create_time.desc())\
.limit(20)\
.all()
lists = []
for log in logs:
lists.append( {'title':log[0].reward_item,'create_time':log[0].create_time.strftime('%Y-%m-%d %H:%M:%S'),'nick':log[1].nick,'round':log[0].round} )
# return render_template('wheel_history.html', lists=lists)
return jsonify({
'code':0,
'msg':'success',
'ext':{'lists':lists}
})
@app.route('/avatar', methods=['GET', 'POST'])
def upload_avatar():
if request.method == 'POST':
file = request.files['file']
uid = request.form['uid']
device_id = request.form['device_id']
if file == None or uid == None or device_id == None:
return jsonify(result=0,message='error,file or uid or device_id is empty!')
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
savefolder = time.strftime('%Y-%m-%d')
savepath = REAL_PATH+os.sep+time.strftime('%Y-%m-%d')
if not os.path.isdir(savepath):
os.mkdir(savepath)
img_hard_path = os.path.join(savepath, uid+'_'+device_id+'_'+filename)
file.save(img_hard_path)
# return redirect(url_for('uploaded_file', filename=filename))
            full_filename = '/'+uid+'_'+device_id+'_'+filename
path_url = request.url_root+UPLOAD_FOLDER+'/'+savefolder+full_filename
# delete old avatar
user = da.get_user(uid)
if user.avatar != '' and len(user.avatar) > 0:
old_avatar = user.avatar
old_avatar_path = os.getcwd()+urlparse.urlparse(old_avatar).path
if os.path.exists(old_avatar_path) and os.path.isfile(old_avatar_path):
os.remove(old_avatar_path)
# update new avatar to db
av = TAvatarVerify()
av.uid = uid
av.avatar = path_url
av.allow = 1
av.add_time = datehelper.get_today_str()
session.merge(av)
session.flush()
return jsonify(result=0, message='success,message:'+full_filename+',path:'+path_url,url=path_url)
# result = session.query(TUser).filter(TUser.id == uid).filter(TUser.id == int(uid)).update({
# TUser.avatar:path_url
# })
# compress_image(img_hard_path)
# if result > 0:
            # refresh the cached avatar after the change
# if r.exists('u'+str(uid)):
# r.hset('u'+str(uid), 'avatar', path_url)
# sys_achi = SystemAchievement(session,uid)
# if not sys_achi.is_achievement_finished(AT_UPLOAD_AVATAR):
# sys_achi.finish_upload_avatar()
# MessageManager.push_notify_reward(r, uid)
# session.flush()
# return jsonify(result=0,message='success,message:'+full_filename+',path:'+path_url,url=path_url)
# return jsonify(result=-1,message='error:upload return false')
pathDir = os.listdir(REAL_PATH)
html = ''
for allDir in pathDir:
# child = os.path.join('%s%s' % (REAL_PATH, allDir))
html +='<li><a href="'+request.url_root+UPLOAD_FOLDER+'/'+allDir+'">'+request.url_root+UPLOAD_FOLDER+'/'+allDir+'</a></li>'
return '''
<!doctype html>
<title>Upload new File</title>
<h1>Upload new <span style='color:green;'>avatar</span></h1>
<form action="" method=post enctype=multipart/form-data>
<p><input type=file name=file>
<input type=text name=uid placeholder=uid>
<input type=text name=device_id placeholder=device_id>
<input type=submit value=Upload>
</form>
<ol>
%s
</ol>
''' % html
# ''' % "<br>".join(os.listdir(app.config['UPLOAD_FOLDER'],))
def compress_image(file_path_url):
cmd = '/usr/bin/pngquant --force --ext=.png --verbose --skip-if-larger --quality=50-90 %s' % file_path_url
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
retval = p.wait()
@app.route('/avatar_verify', methods=['GET', 'POST'])
def avatar_verify():
if request.method == 'GET':
lists = session.query(TAvatarVerify).filter(TAvatarVerify.allow == 0).all()
return render_template('avatar_verify.html',lists=lists)
is_allow = int(request.form['send'])
uid = int(request.form['uid'])
row = session.query(TAvatarVerify).filter(TAvatarVerify.uid == uid).first()
if is_allow:
row.allow = 0
row.allow_time = datehelper.get_today_str()
session.add(row)
result = session.query(TUser).filter(TUser.id == int(uid)).update({
TUser.avatar:row.avatar
})
if result > 0:
if r.exists('u'+str(uid)):
r.hset('u'+str(uid), 'avatar', row.avatar)
sys_achi = SystemAchievement(session,uid)
if not sys_achi.is_achievement_finished(AT_UPLOAD_AVATAR):
sys_achi.finish_upload_avatar()
MessageManager.push_notify_reward(r, uid)
MessageManager.send_mail(session, uid, 0, title='头像审核', content='头像审核成功', type=0)
MessageManager.push_notify_mail(r, uid)
else:
row.allow = -1
row.allow_time = datehelper.get_today_str()
session.add(row)
MessageManager.send_mail(session, uid, 0, title='头像审核', content='头像审核失败', type=0)
MessageManager.push_notify_mail(r, uid)
session.flush()
return redirect('/avatar_verify')
@app.route('/upgrade', methods=['GET', 'POST'])
def upgrade():
allow_ips = ['218.17.162.125']
remote = request.headers['X-Forwarded-For']
if remote not in (allow_ips):
abort(403)
if request.method == 'POST':
file = request.files['file']
if file == None:
return jsonify(result=0,message='error,file or uid or device_id is empty!')
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
backup_file = os.path.join(os.path.join(UPGRADE_PATH, BACK_UP), filename)
if os.path.exists(backup_file):
os.remove(backup_file)
file.save(backup_file)
if os.path.exists(os.path.join(UPGRADE_PATH, filename.split('.')[0])):
__import__('shutil').rmtree(os.path.join(UPGRADE_PATH, filename.split('.')[0]))
unzip_file(filename, UPGRADE_PATH)
# return jsonify(result=0,message='success,message:'+full_filename+',path:'+path_url,url=path_url)
# return jsonify(result=-1,message='error:upload return false')
# if execute(conn, "UPDATE `user` SET `avatar` = '%s' WHERE `id`=%d" % (path_url, int(uid))):
# return jsonify(result=0,message='success,message:'+full_filename+',path:'+path_url,url=path_url)
# return jsonify(result=-1,message='error:upload return false')
#return jsonify(rr=0,mm='success,message:'+full_filename+',path:'+path_url,uu=path_url)
pathDir = os.listdir(UPGRADE_PATH)
html = ''
for allDir in pathDir:
# child = os.path.join('%s%s' % (REAL_PATH, allDir))
html +='<li><a href="'+request.url_root+UPGRADE_FOLDER+'/'+allDir+'">'+request.url_root+UPGRADE_FOLDER+'/'+allDir+'</a></li>'
return '''
<!doctype html>
<title>Upload new File</title>
<h1>Upload new <span style='color:red;'>App</span></h1>
<form action="" method=post enctype=multipart/form-data>
<p><input type=file name=file>
<input type=submit value=Upload>
</form>
<ol>
%s
</ol>
''' % html
@app.route('/customer/robot_send_msg')
def customer_robot_send_msg():
user = request.args['user']
content = request.args['content']
to = request.args['to']
user_talk = customer_service.get_talk_user(user+'_'+to, 'talk_robot_session')
user_talk.add_talk(user, content, img = '', is_new = False, is_self = True)
user_talk.set_back()
customer_service.update_talks(user+'_'+to, user_talk, 'talk_robot_session')
msg_id = r.incr('message_id')
db_session = get_context('session')
if db_session == None:
set_context('session', session)
from_robot = da.get_user(to)
r.hset('message_'+str(user), msg_id, json.dumps({
'from_user': from_robot.id,
'to':user,
'message':content,
'time':int(time.time()),
'from_user_nick':from_robot.nick,
'from_user_avatar':from_robot.avatar
}))
r.rpush('customer_msgs', str(msg_id)+'_'+user)
resp = make_response(jsonify(code=200))
resp.headers['Access-Control-Allow-Origin'] = '*'
resp.headers['Access-Control-Allow-Headers'] = 'x-requested-with,content-type'
return resp
@app.route('/robot/lists', methods=['GET', 'POST'])
def robot_lists():
lists = customer_service.get_talks('talk_robot_session')
resp = make_response(jsonify([x.talk for x in lists]))
resp.headers['Access-Control-Allow-Origin'] = '*'
return resp
@app.route('/customer/lists', methods=['GET', 'POST'])
def customer_lists():
lists = customer_service.get_talks()
resp = make_response(jsonify([x.talk for x in lists]))
resp.headers['Access-Control-Allow-Origin'] = '*'
return resp
@app.route('/talk', methods=['GET', 'POST'])
def talk():
if request.method == 'POST':
wordhelper.set_user_no_talk(r, int(request.form['uid']), int(request.form['timeout']))
return redirect(url_for('talk'))
no_talk_users = r.keys('talk_no:*')
no_talk_html = []
for user in no_talk_users:
timeout = r.ttl(user)
if timeout == None:
no_talk_html.append('<p>user=%s , timeout=0</p>' % user)
else:
no_talk_html.append('<p>user=%s , timeout=%s</p>' % (user, timeout))
#print user,type(user),timeout,type(timeout)
#no_talk_html.append('<p>user=%s , timeout=%s</p>' % (user, timeout))
return '<html><head><meta name="viewport" content="width=device-width, initial-scale=1.0"><title>talk</title></head><body>' \
'<div style="background:#00CC66;padding:10px;max-width:500px;margin:0 auto;border-radius: 10px;border: 1px solid #cecece;">' \
'<form method="post" action="/talk" name="no_talk">' \
'<input type="text" placeholder="用户的UID" name="uid" /><input type="text" placeholder="0永久,>0指定秒数" name="timeout" /><input type="submit" value="提交" />' \
'</form>' \
'%s' \
'</div></body></html>' % "".join(no_talk_html)
@app.route('/black', methods=['GET', 'POST'])
def black():
if request.method == 'POST':
uid = int(request.form['uid'])
if int(request.form['cancel']) > 0:
r.lpush('sys_kick',json.dumps({'uid':uid,'cancel':1}))
r.hdel('sys_blacklist',uid)
else:
r.lpush('sys_kick',json.dumps({'uid':uid,'cancel':0}))
r.hset('sys_blacklist',uid,1)
return redirect(url_for('black'))
blacklist_user = [int(x) for x in r.hkeys('sys_blacklist')]
blacklist_user_html = []
for user in blacklist_user:
blacklist_user_html.append('<p>user=%s' % user)
return '<html><head><meta name="viewport" content="width=device-width, initial-scale=1.0"><title>blacklist</title></head><body>' \
'<div style="background:#00CCCC;padding:10px;max-width:500px;margin:0 auto;border-radius: 10px;border: 1px solid #cecece;">' \
'<form method="post" action="/black" name="black">' \
'<input type="text" placeholder="用户的UID" name="uid" /><label>撤销<input name="cancel" type="radio" value="1" /></label><label>加入<input name="cancel" checked="checked" type="radio" value="0" /></label><input type="submit" value="提交" />' \
'</form>' \
'%s' \
'</div></body></html>' % "".join(blacklist_user_html)
@app.route('/customer/send_msg')
def customer_send_msg():
user = request.args['user']
content = request.args['content']
user_talk = customer_service.get_talk_user(user)
user_talk.add_talk(user, content, img = '', is_new = False, is_self = True)
user_talk.set_back()
customer_service.update_talks(user, user_talk)
msg_id = r.incr('message_id')
r.hset('message_'+str(user), msg_id, json.dumps({
'from_user':10000,
'to':user,
'message':content,
'time':int(time.time()),
'from_user_nick':u'客服',
'from_user_avatar':u''
}))
r.rpush('customer_msgs', str(msg_id)+'_'+user)
resp = make_response(jsonify(code=200))
resp.headers['Access-Control-Allow-Origin'] = '*'
resp.headers['Access-Control-Allow-Headers'] = 'x-requested-with,content-type'
return resp
# items = session.query(TCustomerServiceLog).order_by(desc(TCustomerServiceLog.send_time)).limit(20)
# for item in items:
# key = 'u'+str(item.from_user)
# user_info = r.hgetall(key)
# print '============>'
# print user_info
# item.from_user_nick = user_info.get('nick')
# item.from_user_avatar = user_info.get('avatar')
# return render_template('customer.html',items = items)
@app.route('/war')
def war():
file = 'war_tools.py'
file_dir = '/data/backend/code/tools'
file_path = file_dir+os.path.sep+file
p = subprocess.Popen('python %s' % file_path,stdout=subprocess.PIPE,shell=True)
table_info = p.stdout.readlines()
p = None
s = ''
for x in list(table_info):
s += '<p>'+x+'</p>'
return '<html><head><meta name="viewport" content="width=device-width, initial-scale=1.0"><title>war</title></head><body><div style="background:#CCFFCC;padding:10px;max-width:500px;margin:0 auto;border-radius: 10px;border: 1px solid #cecece;">' \
'%s' \
'</div></body></html>' % s
@app.route('/texas')
def texas():
to = r.hgetall('texas_online')
s = ''
for k,v in to.items():
s += '<p>%s === %s</p>' % (k, v)
return '<html><head><meta name="viewport" content="width=device-width, initial-scale=1.0"><title>texas</title></head><body><div style="background:#CCFFCC;padding:10px;max-width:500px;margin:0 auto;border-radius: 10px;border: 1px solid #cecece;">' \
'%s' \
'</div></body></html>' % s
def run_game_cmd(cmd):
# file_dir = '/data/backend/sh/' #qa
file_dir = '/data/projects/backend/sh/' #dev
cmd_path = os.path.join(file_dir,cmd)
execute_str = '%s.sh reload' % cmd_path
p = subprocess.Popen(execute_str, stdout=subprocess.PIPE,shell=True)
return p.stdout.readlines()
@app.route('/table')
def table():
file = 'online_stat.py'
file_dir = '/root/tools'
file_path = file_dir+os.path.sep+file
p = subprocess.Popen('python %s' % file_path,stdout=subprocess.PIPE,shell=True)
table_info = p.stdout.readlines()
p = None
s = ''
for x in list(table_info):
s += '<p>'+x+'</p>'
return '<html><head><meta name="viewport" content="width=device-width, initial-scale=1.0"><title>table</title></head><body><div style="background:#BBF6FF;padding:10px;max-width:500px;margin:0 auto;border-radius: 10px;border: 1px solid #cecece;">' \
'%s' \
'</div></body></html>' % s
@app.route('/broadcast', methods=['GET','POST'])
def broadcast():
allow_ips = ['218.17.162.125']
if request.remote_addr not in (allow_ips):
abort(403)
if request.method == 'GET':
html_str = '<form method="post" name="list_form"><div style="background:#E2C846;padding:10px;max-width:800px;margin:0 auto;border-radius: 10px;border: 1px solid #cecece;">' \
'<textarea name="broadcast_textarea" id="txt" style="width:100%;height:300px;margin-bottom:4px;">'
broadcast_json = json.loads(r.get('broadcast_json'))
html_str += json.dumps(broadcast_json, ensure_ascii=False)
html_str += '</textarea></form>'
html_str += '<div style="text-align:right;"><input type="submit" style="text-align:right" name="btn_save" value="保存并推送"/></div>'
html_str += '</div>'
html_str += '<form method="post" name="new_form"><div style="background:#ccc;padding:10px;;max-width:800px;margin:10px auto;border-radius: 10px;border: 1px solid #cecece;">' \
'<input type="text" required name="broadcast_new" style="width:100%;margin-bottom:4px;" />' \
'<div style="text-align:right;"><input type="submit" name="btn_save_push" value="保存并推送"/>' \
'<input type="submit" name="btn_fix_push" disabled="disabled" value="维护模式推送"/></div>' \
'</div></form>'
flash_str = ''
messages =get_flashed_messages()
if messages:
flash_str = '<div style="max-width:800px;margin:0 auto;">'
for msg in messages:
flash_str += '<p style="background:#d9edf7;padding:15px;">'
flash_str += msg
flash_str +='</p>'
flash_str += '</div>'
return '<html><head><meta name="viewport" content="width=device-width, initial-scale=1.0"><title>broadcast</title><style>.hover:hover{background:#06f;}</style></head>' \
'<body>' \
'%s' \
'%s' \
'<script>var txt = document.getElementById("txt").value;document.getElementById("txt").value=JSON.stringify(JSON.parse(txt),null,4);</script></body></html>' % (flash_str, html_str)
if 'btn_save' in request.form:
broadcast_json = json.loads(request.form['broadcast_textarea'])
r.set('broadcast_json', json.dumps(broadcast_json))
flash(u'保存列表消息成功')
return redirect(url_for('broadcast'))
elif 'btn_save_push' in request.form:
broadcast_new = request.form['broadcast_new']
broadcast_json = json.loads(r.get('broadcast_json'))
broadcast_json.append({'message':broadcast_new})
r.set('broadcast_json', json.dumps(broadcast_json))
flash(u'新增消息并推送成功')
MessageManager.push_message(r, r.hkeys('online'),PUSH_TYPE['sys_broadcast'],{'message':broadcast_new})
return redirect(url_for('broadcast'))
elif 'btn_fix_push' in request.form:
broadcast_new = request.form['broadcast_new']
broadcast_json = json.loads(r.get('broadcast_json'))
broadcast_json.append({'message':broadcast_new})
r.set('broadcast_json', json.dumps(broadcast_json))
flash(u'推送维护消息成功')
MessageManager.push_message(r, r.hkeys('online'),PUSH_TYPE['fix_broadcast'],{'message':broadcast_new})
return redirect(url_for('broadcast'))
# else:
# return redirect(url_for('broadcast'))
@app.route('/pay_result_new', methods=['POST'])
def pay_result_new():
data = json.loads(request.form['data'])
sign = request.form['sign']
# data['private'] = json.loads(data['private'])
    # verify the callback signature
# if data['private']['privateInfo'] != get_md5(data['private']['order']+CHARGE_KEY+data['private']['other']):
# return json.dumps({'status':'FAIL'})
# if get_sign(request.form['data']) != sign:
# return json.dumps({'status':'FAIL'})
if get_context('session') == None:
set_context('session', session)
if receive_order_callback(session, r, da, data):
return json.dumps({
'status':'SUCCESS'
})
else:
return json.dumps({
'status':'FAIL'
})
@app.route('/pay_result', methods=['POST'])
def pay_result():
data = json.loads(request.form['data'])
sign = request.form['sign']
data['private'] = json.loads(data['private'])
    # verify the callback signature
if data['private']['privateInfo'] != get_md5(data['private']['order']+CHARGE_KEY+data['private']['other']):
return json.dumps({
'status':'FAIL'
})
if get_sign(request.form['data']) != sign:
return json.dumps({
'status':'FAIL'
})
order_handle = OrderHandle(session)
order_handle.get_order(data['private']['order'], data['private']['other'])
print '--------->!!!!order',order_handle.order
if order_handle.order_is_none():
return json.dumps({
'status':'FAIL'
})
order = order_handle.order
print '==============>3'
    # load the old recharge ranking
rank_upp = RankUpper(session)
rank_upp.get_old_index(order.uid)
charge_money = decimal.Decimal( data['money'] )
    # when the paid amount matches the order amount
status = 0
if decimal.Decimal(order.money) != charge_money:
        # when the paid amount differs from the order amount
status = 1
charge_money = order_handle.order.money
order_handle.update_order_status(data['private']['order'],data['order_sn'], charge_money, status)
data['uid'] = order.uid
print '======================>5',data
mail = TMail()
mail.from_user = 10000
mail.to_user = order.uid
mail.sent_time = time.time()
mail.title = u'充值'
mail.type = 0
mail.diamond = 0
mail.gold = 0
mail.state = 1
item_price = 0
if order.shop_id > 0 and order.shop_id < 2000:
shop_item = session.query(TChargeItem).filter(TChargeItem.id == order.shop_id).first()
item_price = int(shop_item.money)
mail.content = u'成功充值%.2f元' % (charge_money)
if shop_item.type is not None and shop_item.type == 'gold':
mail.content += u',购买%d金币' % (shop_item.gold)
if shop_item.type is not None and shop_item.type == 'diamond':
mail.content += u',购买%d个钻石' % (shop_item.diamond)
if shop_item.extra_diamond is not None and shop_item.extra_diamond > 0:
mail.content += u',赠送%d个钻石' % shop_item.extra_diamond
elif order.shop_id >= 2000:
pop_activity = session.query(TPopActivity).filter(TPopActivity.id == order.shop_id).first()
mail.content = pop_activity.description
elif order.shop_id == 0:
item_price = (FRIST_CHARGE['money'] / 100)
mail.content = u'首冲成功%.2f元,获得%d万金币,%d个张喇叭卡,%d张踢人卡,%d张vip经验卡' %\
(item_price,FRIST_CHARGE['gold'] ,FRIST_CHARGE['hore'],FRIST_CHARGE['kicking_card'],FRIST_CHARGE['vip_card'])
else:
quick_charge = QUICK_CHARGE[abs(order.shop_id)-1]
item_price = quick_charge[0] / 100
mail.content = u'快充成功%.2f元,获得%d万金币' % (item_price ,quick_charge[1] )
mail.content += u',获得%d张流量券' % int(charge_money)
print 'shop_id--->',order.shop_id
session.add(mail)
session.flush()
# Mail(from_user=10000, to_user=order.uid, title=u'流量卡', content=).send_mail(session)
print 'mail_id--->',mail.id
user_info = session.query(TUser).filter(TUser.id == order.uid).first()
    # add VIP experience
vip_exp_log = ''
old_vip_exp = 0 if user_info.vip_exp <= 0 else user_info.vip_exp
old_vip_level = vip.to_level(old_vip_exp)
vip_exp_log = 'old: %d, vip:%d, exp:%d \n' % (user_info.id, old_vip_level, old_vip_exp)
new_vip_exp = old_vip_exp + item_price
vip_exp_log += 'new: %d, exp:%d, item_price:%s' % (user_info.id, new_vip_exp,str(item_price))
vip_exp_log += '---------------------------------------------\n'
session.query(TUser).filter(TUser.id == user_info.id).update({
TUser.vip_exp:new_vip_exp
})
user_info.vip_exp = new_vip_exp
session.flush()
new_vip_level = vip.to_level(user_info.vip_exp)
with open('vip.log', 'a') as fp:
fp.write(vip_exp_log)
    # on VIP level-up, grant gold and items and send a broadcast
if old_vip_level < new_vip_level:
diff_level = new_vip_level - old_vip_level
if diff_level > 0:
print 'diff------------------->item_price',diff_level,item_price
push_message(r,r.hkeys('online'),2,{'nick':user_info.nick,'vip_exp':user_info.vip_exp})
print 'tolevel------>',vip.to_level(user_info.vip_exp)
SystemAchievement(session,user_info.id).finish_upgrade_vip(vip.to_level(user_info.vip_exp))
session.flush()
    # notify the user that the recharge succeeded
push_message(r,[user_info.id],0,'',N_CHARGE)
# if r.exists('u'+str(order.uid)):
# r.delete('u'+str(order.uid))
gold = 0
diamond = 0
user_info = session.query(TUser).filter(TUser.id == order.uid).first()
if order.shop_id == 0 and user_info.is_charge == 0:
gold = FRIST_CHARGE['gold']
diamond = FRIST_CHARGE['diamond']
session.query(TUser).with_lockmode("update").filter(TUser.id == order.uid, TUser.is_charge == 0).update({
TUser.is_charge:1,
TUser.gold:TUser.gold + gold * 10000,
TUser.diamond:TUser.diamond + diamond,
TUser.money:TUser.money + charge_money,
})
for item in FRIST_CHARGE['items'].split(','):
arr_item = item.split('-')
save_countof(session, {
'uid':order.uid,
'stuff_id':arr_item[0],
'countof':arr_item[1],
})
elif order.shop_id < 0:
gold = quick_charge[1] * 10000
session.query(TUser).with_lockmode("update").filter(TUser.id == order.uid).update({
TUser.gold:TUser.gold + gold,
TUser.money:TUser.money + charge_money,
})
elif order.shop_id >= 2000:
gold = pop_activity.gold
money = pop_activity.money
session.query(TUser).with_lockmode("update").filter(TUser.id == order.uid).update({
TUser.gold : TUser.gold + gold,
TUser.money : TUser.money + money,
})
new_pop_activity_user = TPopActivityUser()
new_pop_activity_user.uid = user_info.id
new_pop_activity_user.activity_id = pop_activity.id
session.add(new_pop_activity_user)
session.flush()
else:
gold = shop_item.gold
diamond = shop_item.diamond + shop_item.extra_diamond
session.query(TUser).with_lockmode("update").filter(TUser.id == order.uid).update({
TUser.gold:TUser.gold + gold,
TUser.diamond:TUser.diamond + diamond,
TUser.money:TUser.money + charge_money,
})
session.flush()
session.query(TLucky).filter(TLucky.uid == order.uid).update({
TLucky.lucky: TLucky.lucky + (charge_money * 10)
})
ChargeTop.save_rank(session, order.uid, gold,diamond, charge_money)
rank_upp.get_new_index(order.uid)
print '-------------->',rank_upp.before_index,rank_upp.after_index,type(rank_upp.before_index),type(rank_upp.after_index),user_info
if rank_upp.is_up():
MessageManager.push_message(r, r.hkeys('online'),PUSH_TYPE['sys_broadcast'],{'message':BORADCAST_CONF['charge_top'] % (user_info.nick, rank_upp.after_index)})
print 'status, success'
new_user_info = da.get_user(order.uid, True)
    # push onto the user-info update queue
r.lpush('war_user_update',json.dumps({'uid':order.uid}))
print 'update user info gold,old_gold:%d,new_gold:%d' % (user_info.gold, new_user_info.get_gold())
return json.dumps({
'status':'SUCCESS'
})
def get_sign(data):
return hashlib.md5(PAY_CALLBACK_NEW+data+CP_KEY).hexdigest()
def get_md5(s):
return hashlib.md5(s).hexdigest()
def save_countof(session, fields):
insert_stmt = "INSERT INTO bag_item(uid,item_id,countof) VALUES (:col_1,:col_2,:col_3) ON DUPLICATE KEY UPDATE countof = countof + :col_3;"
session.execute(insert_stmt, {
'col_1':fields['uid'],
'col_2':fields['stuff_id'],
'col_3':fields['countof']
})
session.flush()
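# Note: save_countof (above) and save_charge_top (below) both rely on MySQL
# upsert semantics -- the INSERT either creates the row or, on a duplicate key,
# accumulates into the existing countof / charge_money value.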
def save_charge_top(session, fields):
insert_stmt = "INSERT INTO rank_charge_top(add_date,uid,charge_money) VALUES (:col_1,:col_2,:col_3) ON DUPLICATE KEY UPDATE charge_money = charge_money + :col_3;"
session.execute(insert_stmt, {
'col_1':time.strftime('%Y-%m-%d'),
'col_2':fields['uid'],
'col_3':fields['charge_money']
})
session.flush()
# push_message(r,[user_info.id],0,'',N_CHARGE)
# push_message(r,r.hkeys('online'),2,{'nick':user_info.nick,'vip_exp':user_info.vip_exp})
def push_message(r,users,p1,p2,notifi_type = 1):
item = {'users':users,'notifi_type':notifi_type}
if p1 is not None:
item['param1'] = p1
if p2 is not None:
item['param2'] = p2
r.lpush('notification_queue', json.dumps(item))
def unzip_file(zipfilename, unziptodir):
zfobj = zipfile.ZipFile(os.path.join(os.path.join(unziptodir,BACK_UP),zipfilename))
for name in zfobj.namelist():
name = name.replace('\\','/')
if name.endswith('/'):
os.mkdir(os.path.join(unziptodir, name))
else:
ext_filename = os.path.join(unziptodir, name)
ext_dir= os.path.dirname(ext_filename)
if not os.path.exists(ext_dir) : os.mkdir(ext_dir,0777)
outfile = open(ext_filename, 'wb')
outfile.write(zfobj.read(name))
outfile.close()
if __name__ == '__main__':
# Entry the application
app.run() | [
"[email protected]"
]
| |
448b224c06b11dcad0efb3a0e001204a1051988c | 0aad1f032876366b555d4b4d0bd80ad2ae74c226 | /src/ingest-pipeline/md/data_file_types/metadatatsv_metadata_file.py | 98ca0487a1a569a4afbe695e9ac270212a2fecee | [
"MIT"
]
| permissive | icaoberg/ingest-pipeline | d876c33e5f5414f21c6194cd2085c02a126ef73c | 8c8296daaf2a3a71cd213a6b7b8a067739fa5272 | refs/heads/master | 2022-11-19T11:54:00.264295 | 2020-07-01T21:07:08 | 2020-07-01T21:07:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,607 | py | #! /usr/bin/env python
import csv
import os
from pathlib import Path
from metadata_file import MetadataFile
from type_base import MetadataError
from submodules import ingest_validation_tools_submission, ingest_validation_tools_error_report
class MetadataTSVMetadataFile(MetadataFile):
"""
A metadata file type for the specialized metadata.tsv files used to store submission info
"""
    category_name = 'METADATATSV'
def collect_metadata(self):
# print('validating {} as metadata.tsv'.format(self.path))
# dirpath = Path(os.path.dirname(self.path))
# submission = ingest_validation_tools_submission.Submission(directory_path=dirpath,
# ignore_files=os.path.basename(self.path))
# report = ingest_validation_tools_error_report.ErrorReport(submission.get_errors())
# if report.errors:
# # Scan reports an error result
# with open('ingest_validation_tools_report.txt', 'w') as f:
# f.write(report.as_text())
# raise MetadataError('{} failed ingest validation test'.format(self.path))
print('parsing metadatatsv from {}'.format(self.path))
md = []
        with open(self.path, 'r', newline='') as f:
dialect = csv.Sniffer().sniff(f.read(256))
f.seek(0)
reader = csv.DictReader(f, dialect=dialect)
for row in reader:
dct = {k : v for k, v in row.items()}
dct['_from_metadatatsv'] = True
md.append(dct)
return md
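    # A minimal usage sketch (added illustration; the path and columns are hypothetical):
    #
    #   mdf = MetadataTSVMetadataFile('/tmp/submission/metadata.tsv')
    #   rows = mdf.collect_metadata()
    #   # -> a list of per-row dicts, each tagged with '_from_metadatatsv': True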
| [
"[email protected]"
]
| |
6a53fb6ca5851bcacf7250e3aa6bb7bf3a8c255d | a99372d1c71be907e1fbfb4f7287363ff1f51f56 | /Legal/migrations/0005_category_dated.py | 8eed23ace074af81bb4330caa646d8b82770aca4 | [
"MIT"
]
| permissive | domambia/csdigital-gs1kenya-internal-erp | 43045c219b627453f30da9c6bd62335985f81927 | be36378ad7b960d074dd5841aaadc849ac6356de | refs/heads/master | 2022-12-10T13:49:55.516938 | 2021-10-31T15:08:29 | 2021-10-31T15:08:29 | 164,619,152 | 17 | 14 | null | 2022-12-08T01:44:41 | 2019-01-08T09:59:34 | Python | UTF-8 | Python | false | false | 415 | py | # Generated by Django 2.1.5 on 2019-04-09 13:34
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Legal', '0004_auto_20190409_1242'),
]
operations = [
migrations.AddField(
model_name='category',
name='dated',
field=models.DateField(default=datetime.datetime.now),
),
]
| [
"[email protected]"
]
| |
aa8219a0132e52fcee1ff21e14f8b17086844f66 | d57b51ec207002e333b8655a8f5832ed143aa28c | /.history/gos_20200614062134.py | 0926db4d45f1ea761568bc4b9c5dc96acc72a3aa | []
| no_license | yevheniir/python_course_2020 | b42766c4278a08b8b79fec77e036a1b987accf51 | a152d400ab4f45d9d98d8ad8b2560d6f0b408c0b | refs/heads/master | 2021-07-18T02:08:46.314972 | 2020-07-11T15:43:26 | 2020-07-11T15:43:26 | 278,890,802 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,014 | py | # # Import the required libraries
# from BeautifulSoup import BeautifulSoup
# import urllib2
# import re
# # Create a function that collects links
# def getLinks(url):
#     # fetch the page content and assign it to a variable
#     html_page = urllib2.urlopen(url)
#     # turn the content into a BeautifulSoup object
#     soup = BeautifulSoup(html_page)
#     # create an empty list for the links
#     links = []
#     # LOOP OVER EVERY ELEMENT THAT CONTAINS A LINK
#     for link in soup.findAll('a', attrs={'href': re.compile("^http://")}):
#         # append each link to the list
#         links.append(link.get('href'))
#     # return the list
#     return links
# -----------------------------------------------------------------------------------------------------------
# # # Import the required libraries
# import subprocess
# # Loop, using range() to generate consecutive numbers
# for ping in range(1,10):
#     # build the IP address from the iteration number
#     address = "127.0.0." + str(ping)
#     # call() pings the IP address and the response code is stored
#     res = subprocess.call(['ping', '-c', '3', address])
#     # check the response with conditionals and print the result
#     if res == 0:
#         print "ping to", address, "OK"
#     elif res == 2:
#         print "no response from", address
#     else:
#         print "ping to", address, "failed!"
# -----------------------------------------------------------------------------------------------------------
# # Import the required libraries
# import requests
# # Iterate over the list of image URLs
# for i, pic_url in enumerate(["http://x.com/nanachi.jpg", "http://x.com/nezuko.jpg"]):
#     # Open a file whose name is based on the iteration number
#     with open('pic{0}.jpg'.format(i), 'wb') as handle:
#         # Fetch the picture
#         response = requests.get(pic_url, stream=True)
#         # Use a conditional to check whether the request succeeded
#         if not response.ok:
#             print(response)
#         # Iterate over the image bytes, writing 1024-byte blocks to the file
#         for block in response.iter_content(1024):
#             # Stop once there are no bytes left
#             if not block:
#                 break
#             # Write the bytes to the file
#             handle.write(block)
# -----------------------------------------------------------------------------------------------------------
# # Create a class for the account
# class Bank_Account:
#     # The constructor initializes the balance to 0
#     def __init__(self):
#         self.balance=0
#         print("Hello!!! Welcome to the Deposit & Withdrawal Machine")
#     # deposit() asks for the top-up amount via input() and adds it to the balance
#     def deposit(self):
#         amount=float(input("Enter amount to be Deposited: "))
#         self.balance += amount
#         print("\n Amount Deposited:",amount)
#     # withdraw() asks for the amount via input() and subtracts it from the balance
#     def withdraw(self):
#         amount = float(input("Enter amount to be Withdrawn: "))
#         # A conditional checks that the balance is sufficient
#         if self.balance>=amount:
#             self.balance-=amount
#             print("\n You Withdrew:", amount)
#         else:
#             print("\n Insufficient balance ")
#     # Print the balance on screen
#     def display(self):
#         print("\n Net Available Balance=",self.balance)
# # Create an account
# s = Bank_Account()
# # Run some operations on the account
# s.deposit()
# s.withdraw()
# s.display()
# -----------------------------------------------------------------------------------------------------------
# # Create a recursive function that takes a decimal number
# def decimalToBinary(n):
#     # check whether the number is greater than 1
#     if(n > 1):
#         # if so, floor-divide it by 2 and recurse
#         decimalToBinary(n//2)
#     # then print the remainder of division by 2
#     print(n%2, end=' ')
# # Create a function that takes a binary number
# def binaryToDecimal(binary):
#     # keep a spare copy
#     binary1 = binary
#     # initialize three more variables to 0
#     decimal, i, n = 0, 0, 0
#     # iterate until the number we were given reaches 0
#     while(binary != 0):
#         # take the remainder of dividing the number by 10 (its last digit)
#         dec = binary % 10
#         # add dec times 2 raised to the iteration number to the running result
#         decimal = decimal + dec * pow(2, i)
#         # drop the last digit of binary
#         binary = binary//10
#         # add 1 to the iteration count
#         i += 1
#     # print the result
#     print(decimal)
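# # A tiny worked check (added illustration):
# #   decimalToBinary(10) prints "1 0 1 0"
# #   binaryToDecimal(1010) prints 10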
# -----------------------------------------------------------------------------------------------------------
# # Import the required libraries
# import re
# # A conditional checks whether the given e-mail matches a regex found on the internet
# if re.match(r"[^@]+@[^@]+\.[^@]+", "[email protected]"):
#     # If it does, print valid
#     print("valid")
# -----------------------------------------------------------------------------------------------------------
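# (Added note) encrypt() below is a Caesar-style shift cipher; with an arbitrary
# sample string and shift, encrypt("attack", 4) returns "exxego". It assumes
# purely alphabetic input.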
# Create a function that takes the text to encrypt and the shift
def encrypt(text,s):
    # Create a variable for the result
    result = ""
    # Iterate over the text using range() and the text length
    for i in range(len(text)):
        # Take the character at the current index
        char = text[i]
        # Encrypt uppercase characters in plain text
        if (char.isupper()):
            result += chr((ord(char) + s-65) % 26 + 65)
        # Encrypt lowercase characters in plain text
        else:
            result += chr((ord(char) + s - 97) % 26 + 97)
return result | [
"[email protected]"
]
| |
8570724f11f96fad4fbaa801251f384aaec32139 | 9ba0e059a15f2b24d7c031e039d3564c647a9336 | /tuva/imports/Data - After switching to array storage and before ordereddict.py | 3f44cf593a66cc730f3ed901c4c885f5ef9ade7b | []
| no_license | vatir/tuva | 98714d262cc0fbaf714c29e81af25bad8bb02c6c | a105280951c0a21dd0d9eab60d24545e423e0479 | refs/heads/master | 2023-01-25T04:27:38.558356 | 2023-01-11T17:58:47 | 2023-01-11T17:58:47 | 129,917,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,584 | py | from numpy import array
from numpy import append
from numpy import dtype
from collections import OrderedDict
# Is used in testing model run behavior
calltimes = 0
model_init = False
# Build a consistent color list
colorlist = OrderedDict()
colorlist["black"] = (0, 0, 0)
colorlist["red"] = (255, 0, 0)
colorlist["goldenrod"] = (218, 165, 32)
colorlist["magenta 3"] = (205, 0, 205)
colorlist["midnightblue"] = (25, 25, 112)
colorlist["indian red"] = (176, 23, 31)
colorlist["emeraldgreen"] = (0, 201, 87)
colorlist["honeydew 4"] = (131, 139, 131)
colorlist["green 2"] = (0, 238, 0)
colorlist["deepskyblue"] = (0, 191, 255)
colorlist["orangered 2"] = (238, 64, 0)
colorlist["sgi beet"] = (142, 56, 142)
colorlist["manganeseblue"] = (3, 168, 158)
colorlist["cornflowerblue"] = (100, 149, 237)
class ImportOld():
"""
Import old conlin tab delimited data
fileloc is the system path and filename
"""
def __init__(self, fileloc):
from numpy import array
filehandle = open(fileloc, 'r')
self.builtin = ["ind", "ind_error", "dep", "dep_error"]
self.knowncol = {"ind":0, "ind_error":1, "dep":2, "dep_error":3, "group":4}
#self._data = OrderedDict()
self._data = list()
for line in filehandle:
line_list = list()
try:
hoztestentry = 0
for testentry in line.split():
if not (self.knowncol["group"] == hoztestentry):
float(testentry)
hoztestentry += 1
hoztestentry = 0
for entry in line.split():
if (self.knowncol["group"] == hoztestentry):
groupentry = entry
line_list.append(str(entry))
else:
line_list.append(float(entry))
hoztestentry += 1
self._data.append(line_list)
except ValueError:
current_hoz_pos = 0
for entry in line.split():
if entry in self.knowncol.keys():
self.knowncol[entry] = current_hoz_pos
else:
self.knowncol[entry] = current_hoz_pos
current_hoz_pos += 1
self._data = array(self._data)
filehandle.close()
self.init = True
self.Update()
self.init = False
def Update(self):
if self.init == False:
from __main__ import MainFrame
MainFrame.panel.plotpanel.Update()
def traits(self):
traitdict = dict()
for key in self.knowncol.keys():
if key in ["group"]:
traitdict[key] = array(self._data[:, self.knowncol[key]], dtype='S')
if key not in (self.builtin + ["group"]):
traitdict[key] = array(self._data[:, self.knowncol[key]], dtype='f')
return traitdict
def x(self):
return array(self._data[:, self.knowncol["ind"]], dtype='f')
def y(self):
return array(self._data[:, self.knowncol["dep"]], dtype='f')
def xerr(self):
return array(self._data[:, self.knowncol["ind_error"]], dtype='f')
def yerr(self):
return array(self._data[:, self.knowncol["dep_error"]], dtype='f')
def xmin(self):
return float(min(array(self._data[:, self.knowncol["ind"]], dtype='f')))
def ymin(self):
return float(min(array(self._data[:, self.knowncol["dep"]], dtype='f')))
def xmax(self):
return float(max(array(self._data[:, self.knowncol["ind"]], dtype='f')))
def ymax(self):
return float(max(array(self._data[:, self.knowncol["dep"]], dtype='f')))
def Get_Row_Len(self):
return int(self._data.shape[1])
def Get_Col_Len(self):
return int(self._data.shape[0])
def GetNumberRows(self):
return int(self._data.shape[0])
def GetNumberCols(self):
return int(self._data.shape[1])
def GetValue(self, row, col):
value = self._data[(row, col)]
if value is not None:
return value
else:
return ''
def SetValue(self, row, col, value):
if (self.knowncol["group"] == col):
self._data[(row, col)] = str(value)
else:
self._data[(row, col)] = float(value)
self.Update()
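# A minimal usage sketch (added illustration; the path is hypothetical and must
# point at an old ConLin tab-delimited file with ind/ind_error/dep/dep_error
# columns, plus an optional group column):
#
#   data = ImportOld('/path/to/old_conlin_data.txt')
#   print(data.x(), data.y(), data.GetNumberRows())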
| [
"[email protected]"
]
| |
eccc587583776be4807de65835612594c73c72d9 | 4273f6c264fa5a7267557c5e0d338a2cbd27789e | /AIE23/20191102_feature_engineering/a3_feature_engineering/4_reduce_dimension/3D_PCA.py | 5934a47edbdece3ff91c453ba92bed0b0ac92fc7 | []
| no_license | shcqupc/Alg_study | 874d37954ed8ed2cdb3bd492d59cd071836946f5 | 462ee12c72b7f84c5ae45aaf0f65b812d7c1ada1 | refs/heads/master | 2020-07-10T15:26:40.603300 | 2020-03-27T12:53:16 | 2020-03-27T12:53:16 | 204,298,238 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 924 | py | print(__doc__)
# Code source: Gaël Varoquaux
# Modified for documentation by Jaques Grobler
# License: BSD 3 clause
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from sklearn import datasets
from sklearn.decomposition import PCA
# # import some data to play with
iris = datasets.load_iris()
# X = iris.data[:, :2] # we only take the first two features.
y = iris.target
# To get a better understanding of the interaction of the dimensions
# plot the first three PCA dimensions
fig = plt.figure(1, figsize=(8, 6))
ax = Axes3D(fig)
X_reduced = PCA(n_components=3).fit_transform(iris.data)
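# Optional check (added sketch): the fitted estimator also reports how much
# variance each component captures; for iris,
#   PCA(n_components=3).fit(iris.data).explained_variance_ratio_
# is roughly [0.92, 0.05, 0.02].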
ax.scatter(X_reduced[:, 0], X_reduced[:, 1], X_reduced[:, 2], c=y)
ax.set_title("First three PCA directions")
ax.set_xlabel("1st eigenvector")
ax.w_xaxis.set_ticklabels([])
ax.set_ylabel("2nd eigenvector")
ax.w_yaxis.set_ticklabels([])
ax.set_zlabel("3rd eigenvector")
ax.w_zaxis.set_ticklabels([])
plt.show() | [
"[email protected]"
]
| |
cc2d8e32f9b31c5a2c802971ec725b3f6c2105ea | 3f7240da3dc81205a0a3bf3428ee4e7ae74fb3a2 | /src/Week4/Practice/reverse.py | 262d73adcf745fdaf0052bcef4f211a71cc4582e | []
| no_license | theguyoverthere/CMU15-112-Spring17 | b4ab8e29c31410b4c68d7b2c696a76b9d85ab4d8 | b8287092b14e82d2a3aeac6c27bffbc95382eb34 | refs/heads/master | 2021-04-27T08:52:45.237631 | 2018-10-02T15:38:18 | 2018-10-02T15:38:18 | 107,882,442 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,185 | py | import cs112_s17_linter
#******************************************************************************#
# Author: Tarique Anwer
# Date: 9/4/2017
# Function: Destructively reverse a list. So if a equals [2, 3, 4], then after
# reverse, a should equal [4, 3, 2]. As is generally true of
# destructive functions, this function does not return a value.
# Args: A list.
# Returns: None
# Raises: NA
#******************************************************************************#
def reverse(a):
lo = 0
hi = len(a) - 1
while lo <= hi:
a[hi], a[lo] = a[lo], a[hi]
lo += 1
hi -= 1
def testReverse():
print("Testing reverse(a)...", end="")
a = []
reverse(a)
assert(a == [])
a= [1, 2, 3]
reverse(a)
assert(a == [3, 2, 1])
a = ["hi", "there", 1, 2, 3]
reverse(a)
assert(a == [3, 2, 1, "there", "hi"])
a = [[1,2], [2,3], [3,4]]
reverse(a)
assert(a == [[3,4], [2,3], [1,2]])
print("Passed!")
#################################################
# testAll and main
#################################################
def testAll():
testReverse()
def main():
bannedTokens = (
#'False,None,True,and,assert,def,elif,else,' +
#'from,if,import,not,or,return,' +
#'break,continue,for,in,while,repr' +
'as,class,del,except,finally,' +
'global,is,lambda,nonlocal,pass,raise,' +
'try,with,yield,' +
#'abs,all,any,bool,chr,complex,divmod,float,' +
#'int,isinstance,max,min,pow,print,round,sum,' +
#'range,reversed,str,string,[,],ord,chr,input,len'+
'__import__,ascii,bin,bytearray,bytes,callable,' +
'classmethod,compile,delattr,dict,dir,enumerate,' +
'eval,exec,filter,format,frozenset,getattr,globals,' +
'hasattr,hash,help,hex,id,issubclass,iter,' +
'list,locals,map,memoryview,next,object,oct,' +
'open,property,set,' +
'setattr,slice,sorted,staticmethod,super,tuple,' +
'type,vars,zip,importlib,imp,{,}')
cs112_s17_linter.lint(bannedTokens=bannedTokens) # check style rules
testAll()
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
93724f50f55a830aa5f7bccdeb6074f7150601fe | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_3/bxxbub001/question1.py | 4b05b26459fa0cf1a7ed56c52bbf12d955c4967a | []
| no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | #assignment 3
#B.Booi
def makeSqr(height,width):
    for i in range (height):
print("*"*width)
hi = eval(input("Enter the height of the rectangle:\n"))
span = eval(input("Enter the width of the rectangle:\n"))
makeSqr(hi,span)
| [
"[email protected]"
]
| |
bad5bd7500f02ef244bbf720cb53d5388220cf3f | 999f928790a181448fdda17619876c7a39d96bf8 | /bin/waitress-serve | e56b524164b806fa66ae530151a756d3ec584f8e | [
"MIT"
]
| permissive | serashioda/learning_journal2 | adede85fb8092dc23b856ba071b25c6480927a0a | a00e7da8fcdc0179f99bfc2fd4dc7cf77ecd81c2 | refs/heads/master | 2021-01-13T11:00:44.508580 | 2017-01-09T07:09:00 | 2017-01-09T07:09:00 | 77,100,871 | 0 | 1 | null | 2017-01-09T15:05:32 | 2016-12-22T01:52:40 | CSS | UTF-8 | Python | false | false | 263 | #!/Users/Sera/Dropbox/codefellows/401/learning_journal/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from waitress.runner import run
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(run())
| [
"[email protected]"
]
| ||
1200976e1b121cb5062d20f640b8213ddd632f21 | 501615c82801733e69c7447ab9fd68d3883ed947 | /hotfix/.svn/pristine/12/1200976e1b121cb5062d20f640b8213ddd632f21.svn-base | 0dcbbd208f84d8d26629d6051601bfbd3375421b | []
| no_license | az0ne/python | b2e1cc1e925d1fcdb269e7dd4c48e24665deeeee | aec5d23bb412f7dfca374fb5c5b9988c1b817347 | refs/heads/master | 2021-07-18T02:08:46.314972 | 2017-10-27T06:23:36 | 2017-10-27T06:23:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,521 | #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
import time
from functools import wraps
import cPickle as pickle
from mz_platform.services.core.log_service import log_it
from mz_platform.apis.api_result import ApiResult
from mz_platform.exceptions.mz_exception import MZException
from mz_platform.exceptions.mz_exception import MZSysException
from mz_platform.services.core.cache_service import CacheService
def sys_func_log(func):
"""
@brief sys level log, log function call info
@todo
- record function arg
"""
@wraps(func)
def _deco(*args, **kwargs):
log_it('enter:' + func.__name__)
t = time.time()
ret = func(*args, **kwargs)
d = time.time() - t
log_it('out:%s, during:%fs' % (func.__name__, d))
return ret
return _deco
def business_func_log(func):
"""
@brief business level log, log business info
@todo
- record function arg
"""
@wraps(func)
def _deco(*args, **kwargs):
log_it('enter:' + func.__name__)
t = time.time()
ret = func(*args, **kwargs)
d = time.time() - t
log_it('out:%s, during:%fs' % (func.__name__, d))
return ret
return _deco
def api_except_catcher(func):
"""
@brief catch api function exception
@todo
- record function arg
"""
@wraps(func)
def _deco(*args, **kwargs):
err_code = 0x0000
err_desc = ""
ret = None
try:
ret = func(*args, **kwargs)
except MZException, e:
err_code = e.err_code
err_desc = e.err_desc
ret = None
e.print_exc()
except Exception, e:
            e = MZSysException(e, "business exception caught")
err_code = e.err_code
err_desc = e.err_desc
ret = None
e.print_exc()
except:
t, n, tb = sys.exc_info()
            e = MZSysException(n, "unknown exception caught")
err_code = e.err_code
err_desc = e.err_desc
ret = None
e.print_exc()
finally:
return ApiResult(err_code, err_desc, ret)
return _deco
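# A minimal usage sketch for api_except_catcher (hypothetical api function):
#
# @api_except_catcher
# def get_user_info(uid):
#     ...  # may raise MZException; any failure is folded into the result
#
# get_user_info(42) always returns an ApiResult; err_code stays 0x0000 on success.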
class DataCache(object):
"""
    @brief data-cache decorator class
"""
def __init__(self, ns):
self.ns = ns
def data_cacher_get(self, name, *args, **kwargs):
"""
        @brief get the cache entry for name; return None if it does not exist
"""
return None
def data_cacher_set(self, name, value, *args, **kwargs):
"""
        @brief set the cache entry for name
"""
pass
def is_concern(self, concern, *args, **kwargs):
"""
        @brief whether this keyword is one that should trigger caching
"""
return False
def __call__(self, concern):
"""
        @brief callable object; concern is the set of keywords the caller wants cached on
"""
def _wrap(func):
@wraps(func)
def _f(*args, **kwargs):
fn = func.__name__
fn = '%s:%s' % (self.ns, fn)
concernd = self.is_concern(concern, *args, **kwargs)
r = None
if concernd:
r = self.data_cacher_get(fn, *args, **kwargs)
if r:
return r
r = func(*args, **kwargs)
if concernd:
self.data_cacher_set(fn, r, *args, **kwargs)
return r
return _f
return _wrap
class KWDataCache(DataCache):
"""
    @brief cache class keyed on a single keyword argument
"""
def data_cacher_get(self, name, *args, **kwargs):
dch = CacheService.default_instance().data_cache_handler
r = dch.get(name, sub_kw_name=kwargs, deserialize_func=pickle.loads)
return r
def data_cacher_set(self, name, value, *args, **kwargs):
dch = CacheService.default_instance().data_cache_handler
dch.set(name, value, sub_kw_name=kwargs, serialize_func=pickle.dumps)
def is_concern(self, concern, *args, **kwargs):
return len(kwargs) == 1 and kwargs.keys()[0] in concern
# #####################################
# example
# @cache_api_data_kw(('key1', 'key2'))
# #####################################
cache_api_data_kw = KWDataCache('api')
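# A slightly fuller sketch (hypothetical function; per is_concern above, only
# calls passing exactly one of the listed keywords hit the cache):
#
# @cache_api_data_kw(('uid',))
# def get_user_profile(uid=None):
#     ...  # expensive lookup; later calls with the same uid reuse the cache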
| [
"[email protected]"
]
| ||
ce9e0c110ab57c35bd0bdea209890c1efc74d1cb | 28b2144816ce1bf62b7481cd857fdc831a501f6b | /tabook/tests/functional/test_abook.py | e639eb978f75916274865bae76e3895278005cf8 | []
| no_license | t0ster/Turbo-Address-Book | 3191a837d7d28cf9b8c9d20331fe0518062b3892 | 8c5463b1d4423a0c41d7ed75ff9a512ae1bc515b | refs/heads/master | 2020-05-31T14:10:46.449799 | 2011-06-28T14:07:49 | 2011-06-28T14:07:49 | 1,953,556 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | from tabook.tests import *
class TestAbookController(TestController):
def test_index(self):
response = self.app.get(url(controller='abook', action='index'))
# Test response...
| [
"[email protected]"
]
| |
43b7a07cce169607e19f1009edb61ff5d942f077 | b70eb5577099f88ae9f684f2c87647f98e26d42b | /hpc-historias-clinicas/diagnosticos/migrations/0004_auto_20150425_1459.py | 3adebbad200ee6845c2de4535b56e1c10fcae385 | []
| no_license | btenaglia/hpc-historias-clinicas | be1a392a119a72055ba643fba9c9a09b740aef47 | 649d8660381381b1c591667760c122d73071d5ec | refs/heads/master | 2020-06-03T19:05:17.910077 | 2015-06-10T23:05:31 | 2015-06-10T23:05:31 | 32,827,786 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 806 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('diagnosticos', '0003_auto_20150407_2123'),
]
operations = [
migrations.AlterField(
model_name='diagnosticos',
name='fecha',
field=models.DateField(default=datetime.datetime(2015, 4, 25, 14, 59, 14, 459617), help_text='Formato: dd/mm/yyyy'),
preserve_default=True,
),
migrations.AlterField(
model_name='diagnosticos',
name='hora',
field=models.TimeField(default=datetime.datetime(2015, 4, 25, 14, 59, 14, 459671), help_text='Formato: hh:mm'),
preserve_default=True,
),
]
| [
"[email protected]"
]
| |
3fc765874c033319ae16b8f1f830511729d3e15f | eae4038397ea0b0b1ea56424888f53369a1e4282 | /moai/validation/single.py | f781534910c052c4c1bd83781de737fc6c0832f7 | [
"Apache-2.0"
]
| permissive | iampakos/moai-0.1.0a2 | b2378e9e0a84b85c0e2251a419d39d3da7ea17f9 | 2f066bffc66faca0bdc9af53e7992df34d09ce5d | refs/heads/main | 2023-03-13T13:27:54.318498 | 2021-04-01T14:36:52 | 2021-04-01T14:36:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,469 | py | import moai.utils.engine as mieng
import torch
import omegaconf.omegaconf
import typing
import logging
import inspect
import itertools
log = logging.getLogger(__name__)
__all__ = ['Metric']
class Metric(mieng.Single):
def __init__(self,
metrics: omegaconf.DictConfig,
**kwargs: typing.Mapping[str, typing.Any],
):
super(Metric, self).__init__(
items=metrics,
name="metric",
)
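        # Each extra kwarg selects a metric implementation by name from an
        # `indicators` namespace, which is assumed to be provided by the
        # surrounding package (it is not defined in this file).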
loop = ((key, params) for key, params in kwargs.items() if hasattr(indicators, key))
for k, p in loop:
last_module = self.metric
sig = inspect.signature(last_module.forward)
for keys in zip(*list(p[prop] for prop in itertools.chain(sig.parameters, ['out']))):
self.execs.append(lambda tensor_dict, metric_dict, k=keys, p=sig.parameters.keys(), f=last_module:
metric_dict.update({
k[-1]: f(**dict(zip(p,
list(tensor_dict[i] for i in k[:-1])
)))
})
)
def forward(self,
tensors: typing.Dict[str, torch.Tensor]
) -> typing.Dict[str, torch.Tensor]:
metrics = { }
for exe in self.execs:
exe(tensors, metrics)
returned = { }
for k, m in metrics.items():
returned[k] = torch.mean(m) if len(m.size()) > 0 else m
return returned | [
"[email protected]"
]
| |
738ad03ab4aa7aba2177d4d3cfc449823971f0b0 | 0954a4a6d90fc66beee265c22e1fd829ddaf73cd | /digitalcollege/digital/migrations/0006_remove_departmentmodel_d_year.py | ea613757a14f2f32fa6d6437d7e354ca93266907 | []
| no_license | deepawalekedar319/CollegeMiniProject | a302184258e8213ee7604403e3abc60c19d342a8 | 52f46521c305bca167fdf2f4b41e28182476b3e1 | refs/heads/main | 2023-08-30T04:15:18.851167 | 2021-11-08T05:04:11 | 2021-11-08T05:04:11 | 425,695,835 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2021-01-04 08:45
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('digital', '0005_departmentmodel_d_year'),
]
operations = [
migrations.RemoveField(
model_name='departmentmodel',
name='d_year',
),
]
| [
"[email protected]"
]
| |
74b937d7824d37e846f7fd8ae0a5899ffe9ab42c | 67b0379a12a60e9f26232b81047de3470c4a9ff9 | /shop/migrations/0064_auto_20170626_1015.py | 8c03515b558ecc2c5e4c92ccd9c34ce2e360a2c9 | []
| no_license | vintkor/whitemandarin | 8ea9022b889fac718e0858873a07c586cf8da729 | 5afcfc5eef1bb1cc2febf519b04a4819a7b9648f | refs/heads/master | 2021-05-06T03:35:09.367375 | 2017-12-20T15:43:08 | 2017-12-20T15:43:08 | 114,904,110 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 766 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-06-26 07:15
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('shop', '0063_auto_20170610_1623'),
]
operations = [
migrations.AddField(
model_name='category',
name='hotlineurl',
field=models.CharField(blank=True, max_length=250, null=True, verbose_name=b'Hotline url'),
),
migrations.AlterField(
model_name='colorproduct',
name='lastscan_date',
field=models.DateTimeField(default=datetime.datetime(2017, 6, 26, 10, 15, 53, 286167), verbose_name=b'Lastsscan Date'),
),
]
| [
"[email protected]"
]
| |
ba8a2b51e97ab9ebf588a3e5686a02ea77ce062d | 59202987e7b3b69d6aa294e01de4c1d6651ae808 | /toy.py | a2aec37b58484aedb57540a603599e0de53214c0 | [
"MIT"
]
| permissive | Jiajie-Mei/vae-lagging-encoder | 9e4f5486fc9a4f9613a0430e49650d50fecb2bfa | 642f23112a6dc659ddc915d99b32c5e172cff8f9 | refs/heads/master | 2020-04-18T07:59:45.407179 | 2019-01-24T05:47:53 | 2019-01-24T05:47:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,436 | py | import sys
import pickle
import os
import time
import importlib
import argparse
import numpy as np
import torch
from torch import nn, optim
from data import MonoTextData
from modules import LSTMEncoder, LSTMDecoder
from modules import VAE
from modules import generate_grid
clip_grad = 5.0
decay_epoch = 2
lr_decay = 0.5
max_decay = 5
def init_config():
parser = argparse.ArgumentParser(description='VAE mode collapse study')
# optimization parameters
    parser.add_argument('--optim', type=str, default='sgd', help='optimization algorithm: sgd or adam')
parser.add_argument('--nsamples', type=int, default=1, help='number of samples for training')
parser.add_argument('--iw_nsamples', type=int, default=500,
help='number of samples to compute importance weighted estimate')
# plotting parameters
parser.add_argument('--plot_mode', choices=['multiple', 'single'], default='multiple',
help="multiple denotes plotting multiple points, single denotes potting single point, \
both of which have corresponding figures in the paper")
parser.add_argument('--zmin', type=float, default=-20.0,
help="boundary to approximate mean of model posterior p(z|x)")
parser.add_argument('--zmax', type=float, default=20.0,
help="boundary to approximate mean of model posterior p(z|x)")
parser.add_argument('--dz', type=float, default=0.1,
help="granularity to approximate mean of model posterior p(z|x)")
parser.add_argument('--num_plot', type=int, default=500,
help='number of sampled points to be ploted')
parser.add_argument('--plot_niter', type=int, default=200,
help="plot every plot_niter iterations")
    # annealing parameters
parser.add_argument('--warm_up', type=int, default=10)
parser.add_argument('--kl_start', type=float, default=1.0)
# inference parameters
parser.add_argument('--aggressive', type=int, default=0,
help='apply aggressive training when nonzero, reduce to vanilla VAE when aggressive is 0')
# others
parser.add_argument('--seed', type=int, default=783435, metavar='S', help='random seed')
parser.add_argument('--save_plot_data', type=str, default='')
# these are for slurm purpose to save model
parser.add_argument('--jobid', type=int, default=0, help='slurm job id')
parser.add_argument('--taskid', type=int, default=0, help='slurm task id')
args = parser.parse_args()
args.cuda = torch.cuda.is_available()
args.dataset = "synthetic"
if args.plot_mode == "single":
args.num_plot = 50
save_dir = "models/%s" % args.dataset
plot_dir = "plot_data/%s" % args.plot_mode
if not os.path.exists(save_dir):
os.makedirs(save_dir)
if not os.path.exists(plot_dir):
os.makedirs(plot_dir)
args.plot_dir = plot_dir
id_ = "%s_aggressive%d_kls%.2f_warm%d_%d_%d_%d" % \
(args.dataset, args.aggressive, args.kl_start,
args.warm_up, args.jobid, args.taskid, args.seed)
save_path = os.path.join(save_dir, id_ + '.pt')
args.save_path = save_path
# load config file into args
config_file = "config.config_%s" % args.dataset
params = importlib.import_module(config_file).params
args = argparse.Namespace(**vars(args), **params)
args.nz = 1
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if args.cuda:
torch.cuda.manual_seed(args.seed)
return args
def test(model, test_data_batch, mode, args):
report_kl_loss = report_rec_loss = 0
report_num_words = report_num_sents = 0
for i in np.random.permutation(len(test_data_batch)):
batch_data = test_data_batch[i]
batch_size, sent_len = batch_data.size()
# not predict start symbol
report_num_words += (sent_len - 1) * batch_size
report_num_sents += batch_size
loss, loss_rc, loss_kl = model.loss(batch_data, 1.0, nsamples=args.nsamples)
assert(not loss_rc.requires_grad)
loss_rc = loss_rc.sum()
loss_kl = loss_kl.sum()
report_rec_loss += loss_rc.item()
report_kl_loss += loss_kl.item()
mutual_info = calc_mi(model, test_data_batch)
test_loss = (report_rec_loss + report_kl_loss) / report_num_sents
nll = (report_kl_loss + report_rec_loss) / report_num_sents
kl = report_kl_loss / report_num_sents
ppl = np.exp(nll * report_num_sents / report_num_words)
print('%s --- avg_loss: %.4f, kl: %.4f, mi: %.4f, recon: %.4f, nll: %.4f, ppl: %.4f' % \
(mode, test_loss, report_kl_loss / report_num_sents, mutual_info,
report_rec_loss / report_num_sents, nll, ppl))
sys.stdout.flush()
return test_loss, nll, kl, ppl
def calc_iwnll(model, test_data_batch, args):
report_nll_loss = 0
report_num_words = report_num_sents = 0
for id_, i in enumerate(np.random.permutation(len(test_data_batch))):
batch_data = test_data_batch[i]
batch_size, sent_len = batch_data.size()
# not predict start symbol
report_num_words += (sent_len - 1) * batch_size
report_num_sents += batch_size
if id_ % (round(len(test_data_batch) / 10)) == 0:
print('iw nll computing %d0%%' % (id_/(round(len(test_data_batch) / 10))))
loss = model.nll_iw(batch_data, nsamples=args.iw_nsamples)
report_nll_loss += loss.sum().item()
nll = report_nll_loss / report_num_sents
ppl = np.exp(nll * report_num_sents / report_num_words)
print('iw nll: %.4f, iw ppl: %.4f' % (nll, ppl))
sys.stdout.flush()
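# Mutual information I(x; z) under q(z|x), estimated per batch and weighted by
# batch size, so the returned value is a dataset-level average.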
def calc_mi(model, test_data_batch):
mi = 0
num_examples = 0
for batch_data in test_data_batch:
batch_size = batch_data.size(0)
num_examples += batch_size
mutual_info = model.calc_mi_q(batch_data)
mi += mutual_info * batch_size
return mi / num_examples
def plot_multiple(model, plot_data, grid_z,
iter_, args):
plot_data, sents_len = plot_data
plot_data_list = torch.chunk(plot_data, round(args.num_plot / args.batch_size))
infer_posterior_mean = []
report_loss_kl = report_mi = report_num_sample = 0
for data in plot_data_list:
report_loss_kl += model.KL(data).sum().item()
report_num_sample += data.size(0)
report_mi += model.calc_mi_q(data) * data.size(0)
# [batch, 1]
posterior_mean = model.calc_model_posterior_mean(data, grid_z)
infer_mean = model.calc_infer_mean(data)
infer_posterior_mean.append(torch.cat([posterior_mean, infer_mean], 1))
# [*, 2]
infer_posterior_mean = torch.cat(infer_posterior_mean, 0)
save_path = os.path.join(args.plot_dir, 'aggr%d_iter%d_multiple.pickle' % (args.aggressive, iter_))
save_data = {'posterior': infer_posterior_mean[:,0].cpu().numpy(),
'inference': infer_posterior_mean[:,1].cpu().numpy(),
'kl': report_loss_kl / report_num_sample,
'mi': report_mi / report_num_sample
}
pickle.dump(save_data, open(save_path, 'wb'))
def plot_single(infer_mean, posterior_mean, args):
# [batch, time]
infer_mean = torch.cat(infer_mean, 1)
posterior_mean = torch.cat(posterior_mean, 1)
save_path = os.path.join(args.plot_dir, 'aggr%d_single.pickle' % args.aggressive)
save_data = {'posterior': posterior_mean.cpu().numpy(),
'inference': infer_mean.cpu().numpy(),
}
pickle.dump(save_data, open(save_path, 'wb'))
def main(args):
class uniform_initializer(object):
def __init__(self, stdv):
self.stdv = stdv
def __call__(self, tensor):
nn.init.uniform_(tensor, -self.stdv, self.stdv)
class xavier_normal_initializer(object):
def __call__(self, tensor):
nn.init.xavier_normal_(tensor)
if args.cuda:
print('using cuda')
print(args)
opt_dict = {"not_improved": 0, "lr": 1., "best_loss": 1e4}
train_data = MonoTextData(args.train_data)
vocab = train_data.vocab
vocab_size = len(vocab)
val_data = MonoTextData(args.val_data, vocab=vocab)
test_data = MonoTextData(args.test_data, vocab=vocab)
print('Train data: %d samples' % len(train_data))
print('finish reading datasets, vocab size is %d' % len(vocab))
print('dropped sentences: %d' % train_data.dropped)
sys.stdout.flush()
log_niter = (len(train_data)//args.batch_size)//10
model_init = uniform_initializer(0.01)
emb_init = uniform_initializer(0.1)
encoder = LSTMEncoder(args, vocab_size, model_init, emb_init)
args.enc_nh = args.dec_nh
decoder = LSTMDecoder(args, vocab, model_init, emb_init)
device = torch.device("cuda" if args.cuda else "cpu")
args.device = device
vae = VAE(encoder, decoder, args).to(device)
if args.optim == 'sgd':
enc_optimizer = optim.SGD(vae.encoder.parameters(), lr=1.0)
dec_optimizer = optim.SGD(vae.decoder.parameters(), lr=1.0)
opt_dict['lr'] = 1.0
else:
enc_optimizer = optim.Adam(vae.encoder.parameters(), lr=0.001, betas=(0.9, 0.999))
dec_optimizer = optim.Adam(vae.decoder.parameters(), lr=0.001, betas=(0.9, 0.999))
opt_dict['lr'] = 0.001
iter_ = decay_cnt = 0
best_loss = 1e4
best_kl = best_nll = best_ppl = 0
pre_mi = -1
aggressive_flag = bool(args.aggressive)
vae.train()
start = time.time()
kl_weight = args.kl_start
anneal_rate = (1.0 - args.kl_start) / (args.warm_up * (len(train_data) / args.batch_size))
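# Worked example of the annealing schedule (made-up numbers): with
# kl_start=0.1, warm_up=10 epochs, 10000 training sentences and batch size
# 32, there are ~312.5 updates per epoch, so
# anneal_rate = 0.9 / 3125 ~= 2.9e-4 and kl_weight reaches 1.0 after
# roughly warm_up epochs.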
plot_data = train_data.data_sample(nsample=args.num_plot, device=device, batch_first=True)
if args.plot_mode == 'multiple':
grid_z = generate_grid(args.zmin, args.zmax, args.dz, device, ndim=1)
plot_fn = plot_multiple
elif args.plot_mode == 'single':
grid_z = generate_grid(args.zmin, args.zmax, args.dz, device, ndim=1)
plot_fn = plot_single
posterior_mean = []
infer_mean = []
posterior_mean.append(vae.calc_model_posterior_mean(plot_data[0], grid_z))
infer_mean.append(vae.calc_infer_mean(plot_data[0]))
train_data_batch = train_data.create_data_batch(batch_size=args.batch_size,
device=device,
batch_first=True)
val_data_batch = val_data.create_data_batch(batch_size=args.batch_size,
device=device,
batch_first=True)
test_data_batch = test_data.create_data_batch(batch_size=args.batch_size,
device=device,
batch_first=True)
# plot_data_, _ = plot_data
# train_data_batch = torch.chunk(plot_data_, round(args.num_plot / args.batch_size))
for epoch in range(args.epochs):
report_kl_loss = report_rec_loss = 0
report_num_words = report_num_sents = 0
for i in np.random.permutation(len(train_data_batch)):
batch_data = train_data_batch[i]
batch_size, sent_len = batch_data.size()
# not predict start symbol
report_num_words += (sent_len - 1) * batch_size
report_num_sents += batch_size
# kl_weight = 1.0
kl_weight = min(1.0, kl_weight + anneal_rate)
sub_iter = 1
batch_data_enc = batch_data
burn_num_words = 0
burn_pre_loss = 1e4
burn_cur_loss = 0
while aggressive_flag and sub_iter < 100:
enc_optimizer.zero_grad()
dec_optimizer.zero_grad()
burn_batch_size, burn_sents_len = batch_data_enc.size()
burn_num_words += (burn_sents_len - 1) * burn_batch_size
loss, loss_rc, loss_kl = vae.loss(batch_data_enc, kl_weight, nsamples=args.nsamples)
burn_cur_loss += loss.sum().item()
loss = loss.mean(dim=-1)
loss.backward()
torch.nn.utils.clip_grad_norm_(vae.parameters(), clip_grad)
enc_optimizer.step()
# np.random.random_integers is deprecated; randint's upper bound is exclusive
id_ = np.random.randint(0, len(train_data_batch))
batch_data_enc = train_data_batch[id_]
if sub_iter % 15 == 0:
burn_cur_loss = burn_cur_loss / burn_num_words
if burn_pre_loss - burn_cur_loss < 0:
break
burn_pre_loss = burn_cur_loss
burn_cur_loss = burn_num_words = 0
sub_iter += 1
if args.plot_mode == 'single' and epoch == 0 and aggressive_flag:
vae.eval()
with torch.no_grad():
posterior_mean.append(posterior_mean[-1])
infer_mean.append(vae.calc_infer_mean(plot_data[0]))
vae.train()
enc_optimizer.zero_grad()
dec_optimizer.zero_grad()
loss, loss_rc, loss_kl = vae.loss(batch_data, kl_weight, nsamples=args.nsamples)
loss = loss.mean(dim=-1)
loss.backward()
torch.nn.utils.clip_grad_norm_(vae.parameters(), clip_grad)
loss_rc = loss_rc.sum()
loss_kl = loss_kl.sum()
if not aggressive_flag:
enc_optimizer.step()
dec_optimizer.step()
if args.plot_mode == 'single' and epoch == 0:
vae.eval()
with torch.no_grad():
posterior_mean.append(vae.calc_model_posterior_mean(plot_data[0], grid_z))
if aggressive_flag:
infer_mean.append(infer_mean[-1])
else:
infer_mean.append(vae.calc_infer_mean(plot_data[0]))
vae.train()
report_rec_loss += loss_rc.item()
report_kl_loss += loss_kl.item()
if iter_ % log_niter == 0:
train_loss = (report_rec_loss + report_kl_loss) / report_num_sents
if aggressive_flag or epoch == 0:
vae.eval()
mi = calc_mi(vae, val_data_batch)
vae.train()
print('epoch: %d, iter: %d, avg_loss: %.4f, kl: %.4f, mi: %.4f, recon: %.4f,' \
'time elapsed %.2fs' %
(epoch, iter_, train_loss, report_kl_loss / report_num_sents, mi,
report_rec_loss / report_num_sents, time.time() - start))
else:
print('epoch: %d, iter: %d, avg_loss: %.4f, kl: %.4f, recon: %.4f,' \
'time elapsed %.2fs' %
(epoch, iter_, train_loss, report_kl_loss / report_num_sents,
report_rec_loss / report_num_sents, time.time() - start))
sys.stdout.flush()
report_rec_loss = report_kl_loss = 0
report_num_words = report_num_sents = 0
if iter_ % args.plot_niter == 0 and epoch == 0:
vae.eval()
with torch.no_grad():
if args.plot_mode == 'single' and iter_ != 0:
plot_fn(infer_mean, posterior_mean, args)
return
elif args.plot_mode == "multiple":
plot_fn(vae, plot_data, grid_z,
iter_, args)
vae.train()
iter_ += 1
if aggressive_flag and (iter_ % len(train_data_batch)) == 0:
vae.eval()
cur_mi = calc_mi(vae, val_data_batch)
vae.train()
if cur_mi - pre_mi < 0:
aggressive_flag = False
print("STOP BURNING")
pre_mi = cur_mi
# return
print('kl weight %.4f' % kl_weight)
print('epoch: %d, VAL' % epoch)
if args.plot_mode != '':
with torch.no_grad():
plot_fn(vae, plot_data, grid_z, iter_, args)
vae.eval()
with torch.no_grad():
loss, nll, kl, ppl = test(vae, val_data_batch, "VAL", args)
if loss < best_loss:
print('update best loss')
best_loss = loss
best_nll = nll
best_kl = kl
best_ppl = ppl
torch.save(vae.state_dict(), args.save_path)
if loss > opt_dict["best_loss"]:
opt_dict["not_improved"] += 1
if opt_dict["not_improved"] >= decay_epoch and epoch >=15:
opt_dict["best_loss"] = loss
opt_dict["not_improved"] = 0
opt_dict["lr"] = opt_dict["lr"] * lr_decay
vae.load_state_dict(torch.load(args.save_path))
print('new lr: %f' % opt_dict["lr"])
decay_cnt += 1
if args.optim == 'sgd':
enc_optimizer = optim.SGD(vae.encoder.parameters(), lr=opt_dict["lr"])
dec_optimizer = optim.SGD(vae.decoder.parameters(), lr=opt_dict["lr"])
else:
enc_optimizer = optim.Adam(vae.encoder.parameters(), lr=opt_dict["lr"], betas=(0.5, 0.999))
dec_optimizer = optim.Adam(vae.decoder.parameters(), lr=opt_dict["lr"], betas=(0.5, 0.999))
else:
opt_dict["not_improved"] = 0
opt_dict["best_loss"] = loss
if decay_cnt == max_decay:
break
if epoch % args.test_nepoch == 0:
with torch.no_grad():
loss, nll, kl, ppl = test(vae, test_data_batch, "TEST", args)
vae.train()
print('best_loss: %.4f, kl: %.4f, nll: %.4f, ppl: %.4f' \
% (best_loss, best_kl, best_nll, best_ppl))
sys.stdout.flush()
# compute importance weighted estimate of log p(x)
vae.load_state_dict(torch.load(args.save_path))
vae.eval()
test_data_batch = test_data.create_data_batch(batch_size=1,
device=device,
batch_first=True)
with torch.no_grad():
calc_iwnll(vae, test_data_batch, args)
if __name__ == '__main__':
args = init_config()
main(args)
| [
"[email protected]"
]
| |
ddf283ffa12f6b269f7d56040ed800a6480b077c | cee65c4806593554662330368c799c14ec943454 | /src/resource-graph/azext_resourcegraph/vendored_sdks/resourcegraph/models/facet_request_options.py | 4ea94a3bd96311a982f9df2ec766f212ee6e4fad | [
"LicenseRef-scancode-generic-cla",
"MIT"
]
| permissive | azclibot/azure-cli-extensions | d5d1a4ecdfc87fd79f5ad042fb85cdbf881897d2 | c230646258d4b56efb7d44eb7a0230f2943da6f6 | refs/heads/master | 2023-08-28T03:55:02.311902 | 2019-04-04T16:05:45 | 2019-04-04T16:05:45 | 179,548,695 | 1 | 1 | MIT | 2021-07-28T15:26:17 | 2019-04-04T17:54:39 | Python | UTF-8 | Python | false | false | 1,316 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class FacetRequestOptions(Model):
"""The options for facet evaluation.
:param sort_order: The sorting order by the hit count. Possible values
include: 'asc', 'desc'. Default value: "desc" .
:type sort_order: str or ~azure.mgmt.resourcegraph.models.FacetSortOrder
:param top: The maximum number of facet rows that should be returned.
:type top: int
"""
_validation = {
'top': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'sort_order': {'key': 'sortOrder', 'type': 'FacetSortOrder'},
'top': {'key': '$top', 'type': 'int'},
}
def __init__(self, **kwargs):
super(FacetRequestOptions, self).__init__(**kwargs)
self.sort_order = kwargs.get('sort_order', "desc")
self.top = kwargs.get('top', None)
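# Illustrative construction (a sketch; the values are hypothetical and must
# respect the validation above, i.e. 1 <= top <= 1000):
#
#   options = FacetRequestOptions(sort_order="asc", top=50)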
| [
"[email protected]"
]
| |
6ef163ba209fbd301813f10cb5fd3e6c448c401b | 03f40e1e96f78240904ee90ae3257611e0d2b981 | /venv/lib/python3.8/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/urllib3/contrib/securetransport.py | 77a6041054b1a2b37dacb5061fb15d3a088005ae | []
| no_license | otr0624/StoreApp | bd584a37af668a4055969cdf03fa2e688f51e395 | 76ae4040ccfe1f415c8c2acf88550690cb537290 | refs/heads/master | 2022-04-22T19:35:03.231742 | 2020-04-14T23:43:19 | 2020-04-14T23:43:19 | 255,651,497 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 30,151 | py | """
SecureTransport support for urllib3 via ctypes.
This makes platform-native TLS available to urllib3 users on macOS without the
use of a compiler. This is an important feature because the Python Package
Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
this is to give macOS users an alternative solution to the problem, and that
solution is to use SecureTransport.
We use ctypes here because this solution must not require a compiler. That's
because pip is not allowed to require a compiler either.
This is not intended to be a seriously long-term solution to this problem.
The hope is that PEP 543 will eventually solve this issue for us, at which
point we can retire this contrib module. But in the short term, we need to
solve the impending tire fire that is Python on Mac without this kind of
contrib module. So...here we are.
To use this module, simply import and inject it::
import urllib3.contrib.securetransport
urllib3.contrib.securetransport.inject_into_urllib3()
Happy TLSing!
"""
from __future__ import absolute_import
import contextlib
import ctypes
import errno
import os.path
import shutil
import socket
import ssl
import threading
import weakref
from .. import util
from ._securetransport.bindings import Security, SecurityConst, CoreFoundation
from ._securetransport.low_level import (
_assert_no_error,
_cert_array_from_pem,
_temporary_keychain,
_load_client_cert_chain,
)
try: # Platform-specific: Python 2
from socket import _fileobject
except ImportError: # Platform-specific: Python 3
_fileobject = None
from ..packages.backports.makefile import backport_makefile
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
# SNI always works
HAS_SNI = True
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext
# This dictionary is used by the read callback to obtain a handle to the
# calling wrapped socket. This is a pretty silly approach, but for now it'll
# do. I feel like I should be able to smuggle a handle to the wrapped socket
# directly in the SSLConnectionRef, but for now this approach will work I
# guess.
#
# We need to lock around this structure for inserts, but we don't do it for
# reads/writes in the callbacks. The reasoning here goes as follows:
#
# 1. It is not possible to call into the callbacks before the dictionary is
# populated, so once in the callback the id must be in the dictionary.
# 2. The callbacks don't mutate the dictionary, they only read from it, and
# so cannot conflict with any of the insertions.
#
# This is good: if we had to lock in the callbacks we'd drastically slow down
# the performance of this code.
_connection_refs = weakref.WeakValueDictionary()
_connection_ref_lock = threading.Lock()
# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
# for no better reason than we need *a* limit, and this one is right there.
SSL_WRITE_BLOCKSIZE = 16384
# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
# individual cipher suites. We need to do this because this is how
# SecureTransport wants them.
CIPHER_SUITES = [
SecurityConst.TLS_AES_256_GCM_SHA384,
SecurityConst.TLS_CHACHA20_POLY1305_SHA256,
SecurityConst.TLS_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
]
# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
_protocol_to_min_max = {
ssl.PROTOCOL_SSLv23: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
}
if hasattr(ssl, "PROTOCOL_SSLv2"):
_protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
SecurityConst.kSSLProtocol2,
SecurityConst.kSSLProtocol2,
)
if hasattr(ssl, "PROTOCOL_SSLv3"):
_protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
SecurityConst.kSSLProtocol3,
SecurityConst.kSSLProtocol3,
)
if hasattr(ssl, "PROTOCOL_TLSv1"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
SecurityConst.kTLSProtocol1,
SecurityConst.kTLSProtocol1,
)
if hasattr(ssl, "PROTOCOL_TLSv1_1"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
SecurityConst.kTLSProtocol11,
SecurityConst.kTLSProtocol11,
)
if hasattr(ssl, "PROTOCOL_TLSv1_2"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
SecurityConst.kTLSProtocol12,
SecurityConst.kTLSProtocol12,
)
if hasattr(ssl, "PROTOCOL_TLS"):
_protocol_to_min_max[ssl.PROTOCOL_TLS] = _protocol_to_min_max[ssl.PROTOCOL_SSLv23]
def inject_into_urllib3():
"""
Monkey-patch urllib3 with SecureTransport-backed SSL-support.
"""
util.ssl_.SSLContext = SecureTransportContext
util.HAS_SNI = HAS_SNI
util.ssl_.HAS_SNI = HAS_SNI
util.IS_SECURETRANSPORT = True
util.ssl_.IS_SECURETRANSPORT = True
def extract_from_urllib3():
"""
Undo monkey-patching by :func:`inject_into_urllib3`.
"""
util.ssl_.SSLContext = orig_util_SSLContext
util.HAS_SNI = orig_util_HAS_SNI
util.ssl_.HAS_SNI = orig_util_HAS_SNI
util.IS_SECURETRANSPORT = False
util.ssl_.IS_SECURETRANSPORT = False
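# A minimal end-to-end sketch mirroring the module docstring (assumes the
# caller has urllib3 available; extract_from_urllib3 undoes the patch):
#
#   import urllib3
#   import urllib3.contrib.securetransport as securetransport
#   securetransport.inject_into_urllib3()
#   try:
#       http = urllib3.PoolManager()
#       response = http.request("GET", "https://example.com/")
#   finally:
#       securetransport.extract_from_urllib3()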
def _read_callback(connection_id, data_buffer, data_length_pointer):
"""
SecureTransport read callback. This is called by ST to request that data
be returned from the socket.
"""
wrapped_socket = None
try:
wrapped_socket = _connection_refs.get(connection_id)
if wrapped_socket is None:
return SecurityConst.errSSLInternal
base_socket = wrapped_socket.socket
requested_length = data_length_pointer[0]
timeout = wrapped_socket.gettimeout()
error = None
read_count = 0
try:
while read_count < requested_length:
if timeout is None or timeout >= 0:
if not util.wait_for_read(base_socket, timeout):
raise socket.error(errno.EAGAIN, "timed out")
remaining = requested_length - read_count
buffer = (ctypes.c_char * remaining).from_address(
data_buffer + read_count
)
chunk_size = base_socket.recv_into(buffer, remaining)
read_count += chunk_size
if not chunk_size:
if not read_count:
return SecurityConst.errSSLClosedGraceful
break
except (socket.error) as e:
error = e.errno
if error is not None and error != errno.EAGAIN:
data_length_pointer[0] = read_count
if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
data_length_pointer[0] = read_count
if read_count != requested_length:
return SecurityConst.errSSLWouldBlock
return 0
except Exception as e:
if wrapped_socket is not None:
wrapped_socket._exception = e
return SecurityConst.errSSLInternal
def _write_callback(connection_id, data_buffer, data_length_pointer):
"""
SecureTransport write callback. This is called by ST to request that data
actually be sent on the network.
"""
wrapped_socket = None
try:
wrapped_socket = _connection_refs.get(connection_id)
if wrapped_socket is None:
return SecurityConst.errSSLInternal
base_socket = wrapped_socket.socket
bytes_to_write = data_length_pointer[0]
data = ctypes.string_at(data_buffer, bytes_to_write)
timeout = wrapped_socket.gettimeout()
error = None
sent = 0
try:
while sent < bytes_to_write:
if timeout is None or timeout >= 0:
if not util.wait_for_write(base_socket, timeout):
raise socket.error(errno.EAGAIN, "timed out")
chunk_sent = base_socket.send(data)
sent += chunk_sent
# This has some needless copying here, but I'm not sure there's
# much value in optimising this data path.
data = data[chunk_sent:]
except (socket.error) as e:
error = e.errno
if error is not None and error != errno.EAGAIN:
data_length_pointer[0] = sent
if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
data_length_pointer[0] = sent
if sent != bytes_to_write:
return SecurityConst.errSSLWouldBlock
return 0
except Exception as e:
if wrapped_socket is not None:
wrapped_socket._exception = e
return SecurityConst.errSSLInternal
# We need to keep these two objects references alive: if they get GC'd while
# in use then SecureTransport could attempt to call a function that is in freed
# memory. That would be...uh...bad. Yeah, that's the word. Bad.
_read_callback_pointer = Security.SSLReadFunc(_read_callback)
_write_callback_pointer = Security.SSLWriteFunc(_write_callback)
class WrappedSocket(object):
"""
API-compatibility wrapper for Python's OpenSSL wrapped socket object.
Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
collector of PyPy.
"""
def __init__(self, socket):
self.socket = socket
self.context = None
self._makefile_refs = 0
self._closed = False
self._exception = None
self._keychain = None
self._keychain_dir = None
self._client_cert_chain = None
# We save off the previously-configured timeout and then set it to
# zero. This is done because we use select and friends to handle the
# timeouts, but if we leave the timeout set on the lower socket then
# Python will "kindly" call select on that socket again for us. Avoid
# that by forcing the timeout to zero.
self._timeout = self.socket.gettimeout()
self.socket.settimeout(0)
@contextlib.contextmanager
def _raise_on_error(self):
"""
A context manager that can be used to wrap calls that do I/O from
SecureTransport. If any of the I/O callbacks hit an exception, this
context manager will correctly propagate the exception after the fact.
This avoids silently swallowing those exceptions.
It also correctly forces the socket closed.
"""
self._exception = None
# We explicitly don't catch around this yield because in the unlikely
# event that an exception was hit in the block we don't want to swallow
# it.
yield
if self._exception is not None:
exception, self._exception = self._exception, None
self.close()
raise exception
def _set_ciphers(self):
"""
Sets up the allowed ciphers. By default this matches the set in
util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. The list is
hard-coded and can't be changed at this time, mostly because parsing
OpenSSL cipher strings is going to be a freaking nightmare.
"""
ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
result = Security.SSLSetEnabledCiphers(
self.context, ciphers, len(CIPHER_SUITES)
)
_assert_no_error(result)
def _custom_validate(self, verify, trust_bundle):
"""
Called when we have set custom validation. We do this in two cases:
first, when cert validation is entirely disabled; and second, when
using a custom trust DB.
"""
# If we disabled cert validation, just say: cool.
if not verify:
return
# We want data in memory, so load it up.
if os.path.isfile(trust_bundle):
with open(trust_bundle, "rb") as f:
trust_bundle = f.read()
cert_array = None
trust = Security.SecTrustRef()
try:
# Get a CFArray that contains the certs we want.
cert_array = _cert_array_from_pem(trust_bundle)
# Ok, now the hard part. We want to get the SecTrustRef that ST has
# created for this connection, shove our CAs into it, tell ST to
# ignore everything else it knows, and then ask if it can build a
# chain. This is a buuuunch of code.
result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
_assert_no_error(result)
if not trust:
raise ssl.SSLError("Failed to copy trust reference")
result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
_assert_no_error(result)
result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
_assert_no_error(result)
trust_result = Security.SecTrustResultType()
result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
_assert_no_error(result)
finally:
if trust:
CoreFoundation.CFRelease(trust)
if cert_array is not None:
CoreFoundation.CFRelease(cert_array)
# Ok, now we can look at what the result was.
successes = (
SecurityConst.kSecTrustResultUnspecified,
SecurityConst.kSecTrustResultProceed,
)
if trust_result.value not in successes:
raise ssl.SSLError(
"certificate verify failed, error code: %d" % trust_result.value
)
def handshake(
self,
server_hostname,
verify,
trust_bundle,
min_version,
max_version,
client_cert,
client_key,
client_key_passphrase,
):
"""
Actually performs the TLS handshake. This is run automatically by
wrapped socket, and shouldn't be needed in user code.
"""
# First, we do the initial bits of connection setup. We need to create
# a context, set its I/O funcs, and set the connection reference.
self.context = Security.SSLCreateContext(
None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
)
result = Security.SSLSetIOFuncs(
self.context, _read_callback_pointer, _write_callback_pointer
)
_assert_no_error(result)
# Here we need to compute the handle to use. We do this by taking the
# id of self modulo 2**31 - 1. If this is already in the dictionary, we
# just keep incrementing by one until we find a free space.
with _connection_ref_lock:
handle = id(self) % 2147483647
while handle in _connection_refs:
handle = (handle + 1) % 2147483647
_connection_refs[handle] = self
result = Security.SSLSetConnection(self.context, handle)
_assert_no_error(result)
# If we have a server hostname, we should set that too.
if server_hostname:
if not isinstance(server_hostname, bytes):
server_hostname = server_hostname.encode("utf-8")
result = Security.SSLSetPeerDomainName(
self.context, server_hostname, len(server_hostname)
)
_assert_no_error(result)
# Setup the ciphers.
self._set_ciphers()
# Set the minimum and maximum TLS versions.
result = Security.SSLSetProtocolVersionMin(self.context, min_version)
_assert_no_error(result)
result = Security.SSLSetProtocolVersionMax(self.context, max_version)
_assert_no_error(result)
# If there's a trust DB, we need to use it. We do that by telling
# SecureTransport to break on server auth. We also do that if we don't
# want to validate the certs at all: we just won't actually do any
# authing in that case.
if not verify or trust_bundle is not None:
result = Security.SSLSetSessionOption(
self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
)
_assert_no_error(result)
# If there's a client cert, we need to use it.
if client_cert:
self._keychain, self._keychain_dir = _temporary_keychain()
self._client_cert_chain = _load_client_cert_chain(
self._keychain, client_cert, client_key
)
result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
_assert_no_error(result)
while True:
with self._raise_on_error():
result = Security.SSLHandshake(self.context)
if result == SecurityConst.errSSLWouldBlock:
raise socket.timeout("handshake timed out")
elif result == SecurityConst.errSSLServerAuthCompleted:
self._custom_validate(verify, trust_bundle)
continue
else:
_assert_no_error(result)
break
def fileno(self):
return self.socket.fileno()
# Copy-pasted from Python 3.5 source code
def _decref_socketios(self):
if self._makefile_refs > 0:
self._makefile_refs -= 1
if self._closed:
self.close()
def recv(self, bufsiz):
buffer = ctypes.create_string_buffer(bufsiz)
bytes_read = self.recv_into(buffer, bufsiz)
data = buffer[:bytes_read]
return data
def recv_into(self, buffer, nbytes=None):
# Read short on EOF.
if self._closed:
return 0
if nbytes is None:
nbytes = len(buffer)
buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
processed_bytes = ctypes.c_size_t(0)
with self._raise_on_error():
result = Security.SSLRead(
self.context, buffer, nbytes, ctypes.byref(processed_bytes)
)
# There are some result codes that we want to treat as "not always
# errors". Specifically, those are errSSLWouldBlock,
# errSSLClosedGraceful, and errSSLClosedNoNotify.
if result == SecurityConst.errSSLWouldBlock:
# If we didn't process any bytes, then this was just a time out.
# However, we can get errSSLWouldBlock in situations when we *did*
# read some data, and in those cases we should just read "short"
# and return.
if processed_bytes.value == 0:
# Timed out, no data read.
raise socket.timeout("recv timed out")
elif result in (
SecurityConst.errSSLClosedGraceful,
SecurityConst.errSSLClosedNoNotify,
):
# The remote peer has closed this connection. We should do so as
# well. Note that we don't actually return here because in
# principle this could actually be fired along with return data.
# It's unlikely though.
self.close()
else:
_assert_no_error(result)
# Ok, we read and probably succeeded. We should return whatever data
# was actually read.
return processed_bytes.value
def settimeout(self, timeout):
self._timeout = timeout
def gettimeout(self):
return self._timeout
def send(self, data):
processed_bytes = ctypes.c_size_t(0)
with self._raise_on_error():
result = Security.SSLWrite(
self.context, data, len(data), ctypes.byref(processed_bytes)
)
if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
# Timed out
raise socket.timeout("send timed out")
else:
_assert_no_error(result)
# We sent, and probably succeeded. Tell them how much we sent.
return processed_bytes.value
def sendall(self, data):
total_sent = 0
while total_sent < len(data):
sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
total_sent += sent
def shutdown(self):
with self._raise_on_error():
Security.SSLClose(self.context)
def close(self):
# TODO: should I do clean shutdown here? Do I have to?
if self._makefile_refs < 1:
self._closed = True
if self.context:
CoreFoundation.CFRelease(self.context)
self.context = None
if self._client_cert_chain:
CoreFoundation.CFRelease(self._client_cert_chain)
self._client_cert_chain = None
if self._keychain:
Security.SecKeychainDelete(self._keychain)
CoreFoundation.CFRelease(self._keychain)
shutil.rmtree(self._keychain_dir)
self._keychain = self._keychain_dir = None
return self.socket.close()
else:
self._makefile_refs -= 1
def getpeercert(self, binary_form=False):
# Urgh, annoying.
#
# Here's how we do this:
#
# 1. Call SSLCopyPeerTrust to get hold of the trust object for this
# connection.
# 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
# 3. To get the CN, call SecCertificateCopyCommonName and process that
# string so that it's of the appropriate type.
# 4. To get the SAN, we need to do something a bit more complex:
# a. Call SecCertificateCopyValues to get the data, requesting
# kSecOIDSubjectAltName.
# b. Mess about with this dictionary to try to get the SANs out.
#
# This is gross. Really gross. It's going to be a few hundred LoC extra
# just to repeat something that SecureTransport can *already do*. So my
# operating assumption at this time is that what we want to do is
# instead to just flag to urllib3 that it shouldn't do its own hostname
# validation when using SecureTransport.
if not binary_form:
raise ValueError("SecureTransport only supports dumping binary certs")
trust = Security.SecTrustRef()
certdata = None
der_bytes = None
try:
# Grab the trust store.
result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
_assert_no_error(result)
if not trust:
# Probably we haven't done the handshake yet. No biggie.
return None
cert_count = Security.SecTrustGetCertificateCount(trust)
if not cert_count:
# Also a case that might happen if we haven't handshaked.
# Handshook? Handshaken?
return None
leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
assert leaf
# Ok, now we want the DER bytes.
certdata = Security.SecCertificateCopyData(leaf)
assert certdata
data_length = CoreFoundation.CFDataGetLength(certdata)
data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
der_bytes = ctypes.string_at(data_buffer, data_length)
finally:
if certdata:
CoreFoundation.CFRelease(certdata)
if trust:
CoreFoundation.CFRelease(trust)
return der_bytes
def _reuse(self):
self._makefile_refs += 1
def _drop(self):
if self._makefile_refs < 1:
self.close()
else:
self._makefile_refs -= 1
if _fileobject: # Platform-specific: Python 2
def makefile(self, mode, bufsize=-1):
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
else: # Platform-specific: Python 3
def makefile(self, mode="r", buffering=None, *args, **kwargs):
# We disable buffering with SecureTransport because it conflicts with
# the buffering that ST does internally (see issue #1153 for more).
buffering = 0
return backport_makefile(self, mode, buffering, *args, **kwargs)
WrappedSocket.makefile = makefile
class SecureTransportContext(object):
"""
I am a wrapper class for the SecureTransport library, to translate the
interface of the standard library ``SSLContext`` object to calls into
SecureTransport.
"""
def __init__(self, protocol):
self._min_version, self._max_version = _protocol_to_min_max[protocol]
self._options = 0
self._verify = False
self._trust_bundle = None
self._client_cert = None
self._client_key = None
self._client_key_passphrase = None
@property
def check_hostname(self):
"""
SecureTransport cannot have its hostname checking disabled. For more,
see the comment on getpeercert() in this file.
"""
return True
@check_hostname.setter
def check_hostname(self, value):
"""
SecureTransport cannot have its hostname checking disabled. For more,
see the comment on getpeercert() in this file.
"""
pass
@property
def options(self):
# TODO: Well, crap.
#
# So this is the bit of the code that is the most likely to cause us
# trouble. Essentially we need to enumerate all of the SSL options that
# users might want to use and try to see if we can sensibly translate
# them, or whether we should just ignore them.
return self._options
@options.setter
def options(self, value):
# TODO: Update in line with above.
self._options = value
@property
def verify_mode(self):
return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE
@verify_mode.setter
def verify_mode(self, value):
self._verify = True if value == ssl.CERT_REQUIRED else False
def set_default_verify_paths(self):
# So, this has to do something a bit weird. Specifically, what it does
# is nothing.
#
# This means that, if we had previously had load_verify_locations
# called, this does not undo that. We need to do that because it turns
# out that the rest of the urllib3 code will attempt to load the
# default verify paths if it hasn't been told about any paths, even if
# the context itself was sometime earlier. We resolve that by just
# ignoring it.
pass
def load_default_certs(self):
return self.set_default_verify_paths()
def set_ciphers(self, ciphers):
# For now, we just require the default cipher string.
if ciphers != util.ssl_.DEFAULT_CIPHERS:
raise ValueError("SecureTransport doesn't support custom cipher strings")
def load_verify_locations(self, cafile=None, capath=None, cadata=None):
# OK, we only really support cadata and cafile.
if capath is not None:
raise ValueError("SecureTransport does not support cert directories")
self._trust_bundle = cafile or cadata
def load_cert_chain(self, certfile, keyfile=None, password=None):
self._client_cert = certfile
self._client_key = keyfile
self._client_cert_passphrase = password
def wrap_socket(
self,
sock,
server_side=False,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
server_hostname=None,
):
# So, what do we do here? Firstly, we assert some properties. This is a
# stripped down shim, so there is some functionality we don't support.
# See PEP 543 for the real deal.
assert not server_side
assert do_handshake_on_connect
assert suppress_ragged_eofs
# Ok, we're good to go. Now we want to create the wrapped socket object
# and store it in the appropriate place.
wrapped_socket = WrappedSocket(sock)
# Now we can handshake
wrapped_socket.handshake(
server_hostname,
self._verify,
self._trust_bundle,
self._min_version,
self._max_version,
self._client_cert,
self._client_key,
self._client_key_passphrase,
)
return wrapped_socket
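# Illustrative direct use (a sketch; urllib3 normally drives this for you).
# `sock` is a hypothetical connected TCP socket and the CA path is made up:
#
#   context = SecureTransportContext(ssl.PROTOCOL_SSLv23)
#   context.verify_mode = ssl.CERT_REQUIRED
#   context.load_verify_locations(cafile="/tmp/ca-bundle.pem")
#   tls_sock = context.wrap_socket(sock, server_hostname="example.com")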
| [
"[email protected]"
]
| |
40e64497e28bc9b351edfd900f44634d458a6244 | e13daffd10be4fc8dd004e4d2bd4fc8e0c408840 | /lbworkflow/core/transition.py | adcd71ef73825ff290d50ece913bcb83dd395a8c | []
| no_license | jimmy201602/django-lb-workflow | 47110b49c7a5b7a4d18c2de79cbbd701a0ef5743 | 52a44ed33964d6392c4eeb71047e74c892caa716 | refs/heads/master | 2018-11-26T12:20:44.696867 | 2018-09-05T03:42:36 | 2018-09-05T03:42:36 | 114,515,837 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,851 | py | from django.utils import timezone
from lbworkflow.models import Event
from lbworkflow.models import Task
from .sendmsg import wf_send_msg
def create_event(instance, transition, **kwargs):
act_type = 'transition' if transition.pk else transition.code
if transition.is_agree:
act_type = 'agree'
event = Event.objects.create(
instance=instance, act_name=transition.name, act_type=act_type,
**kwargs)
return event
class TransitionExecutor(object):
def __init__(
self, operator, instance, task, transition=None,
comment='', attachments=[]):
self.wf_obj = instance.content_object
self.instance = instance
self.operator = operator
self.task = task
self.transition = transition
self.comment = comment
self.attachments = attachments
self.from_node = instance.cur_node
# hold&assign wouldn't change node
self.to_node = transition.output_node
self.all_todo_tasks = instance.get_todo_tasks()
self.last_event = None
def execute(self):
# TODO check permission
all_todo_tasks = self.all_todo_tasks
need_transfer = False
if self.transition.routing_rule == 'joint' and self.transition.code not in ['back to', 'rollback']:
if all_todo_tasks.count() == 1:
need_transfer = True
else:
need_transfer = True
self._complete_task(need_transfer)
if not need_transfer:
return
self._do_transfer()
# if is agree should check if need auto agree for next node
if self.transition.is_agree or self.to_node.node_type == 'router':
self._auto_agree_next_node()
def _auto_agree_next_node(self):
instance = self.instance
agree_transition = instance.get_agree_transition()
all_todo_tasks = instance.get_todo_tasks()
if not agree_transition:
return
# if from router, create a task
if self.to_node.node_type == 'router':
task = Task(
instance=self.instance,
node=self.instance.cur_node,
user=self.operator,
)
all_todo_tasks = [task]
for task in all_todo_tasks:
users = [task.user, task.agent_user]
users = [e for e in users if e]
for user in set(users):
if self.instance.cur_node != task.node: # has processed
return
if instance.is_user_agreed(user):
TransitionExecutor(self.operator, instance, task, agree_transition).execute()
def _complete_task(self, need_transfer):
""" close workite, create event and return it """
instance = self.instance
task = self.task
transition = self.transition
task.status = 'completed'
task.save()
to_node = self.to_node if need_transfer else instance.cur_node
self.to_node = to_node
event = None
pre_last_event = instance.last_event()
if pre_last_event and pre_last_event.new_node.node_type == 'router':
event = pre_last_event
event.new_node = to_node
event.save()
if not event:
event = create_event(
instance, transition,
comment=self.comment, user=self.operator,
old_node=task.node, new_node=to_node,
task=task)
if self.attachments:
event.attachments.add(*self.attachments)
self.last_event = event
return event
def _do_transfer_for_instance(self):
instance = self.instance
wf_obj = self.wf_obj
from_node = self.from_node
from_status = from_node.status
to_node = self.to_node
to_status = self.to_node.status
# Submit
if not from_node.is_submitted() and to_node.is_submitted():
instance.submit_time = timezone.now()
wf_obj.on_submit()
# cancel & give up & reject
if from_node.is_submitted() and not to_node.is_submitted():
wf_obj.on_fail()
# complete
if from_status != 'completed' and to_status == 'completed':
instance.end_on = timezone.now()
self.wf_obj.on_complete()
# cancel complete
if from_status == 'completed' and to_status != 'completed':
instance.end_on = None
instance.cur_node = self.to_node
self.wf_obj.on_do_transition(from_node, to_node)
instance.save()
def _send_notification(self):
instance = self.instance
last_event = self.last_event
notice_users = last_event.notice_users.exclude(
pk__in=[self.operator.pk, instance.created_by.pk]).distinct()
wf_send_msg(notice_users, 'notify', last_event)
# send notification to instance.created_by
if instance.created_by != self.operator:
wf_send_msg([instance.created_by], 'transfered', last_event)
def _gen_new_task(self):
last_event = self.last_event
if not last_event:
return
next_operators = last_event.next_operators.distinct()
need_notify_operators = []
for operator in next_operators:
new_task = Task(
instance=self.instance, node=self.to_node,
user=operator)
new_task.update_authorization(commit=True)
# notify next operators (not including the current operator and instance.created_by)
if operator not in [self.operator, self.instance.created_by]:
need_notify_operators.append(operator)
agent_user = new_task.agent_user
if agent_user and agent_user not in [self.operator, self.instance.created_by]:
need_notify_operators.append(agent_user)
wf_send_msg(need_notify_operators, 'new_task', last_event)
def update_users_on_transfer(self):
instance = self.instance
event = self.last_event
to_node = event.new_node
next_operators = to_node.get_operators(instance.created_by, self.operator, instance)
event.next_operators.add(*next_operators)
notice_users = to_node.get_notice_users(instance.created_by, self.operator, instance)
event.notice_users.add(*notice_users)
can_view_users = to_node.get_share_users(instance.created_by, self.operator, instance)
instance.can_view_users.add(*can_view_users)
def _do_transfer(self):
self.update_users_on_transfer()
# auto complete all current work item
self.all_todo_tasks.update(status='completed')
self._do_transfer_for_instance()
self._gen_new_task()
self._send_notification()
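# Illustrative driver (a sketch; `operator`, `instance`, `task` and
# `transition` are assumed to come from the surrounding workflow models):
#
#   executor = TransitionExecutor(operator, instance, task, transition,
#                                 comment='approved')
#   executor.execute()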
| [
"[email protected]"
]
| |
c855a992495e5ab628fbe70c872eae00b20b337a | 5d58fa1d54855f18bad5688de4459af8d461c0ac | /tests/unit/modules/network/onyx/test_onyx_ospf.py | 893dc959fa735fa3382b2d96552f34de11bd4532 | []
| no_license | nasirhm/general | b3b52f6e31be3de8bae0414da620d8cdbb2c2366 | 5ccd89933297f5587dae5cd114e24ea5c54f7ce5 | refs/heads/master | 2021-01-04T07:03:21.121102 | 2020-02-13T20:59:56 | 2020-02-13T20:59:56 | 240,440,187 | 1 | 0 | null | 2020-02-14T06:08:14 | 2020-02-14T06:08:13 | null | UTF-8 | Python | false | false | 4,644 | py | #
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible_collections.community.general.tests.unit.compat.mock import patch
from ansible_collections.community.general.plugins.modules import onyx_ospf
from ansible_collections.community.general.tests.unit.modules.utils import set_module_args
from ..onyx_module import TestOnyxModule, load_fixture
class TestOnyxOspfModule(TestOnyxModule):
module = onyx_ospf
def setUp(self):
super(TestOnyxOspfModule, self).setUp()
self._ospf_exists = True
self.mock_get_config = patch.object(
onyx_ospf.OnyxOspfModule,
"_get_ospf_config")
self.get_config = self.mock_get_config.start()
self.mock_get_interfaces_config = patch.object(
onyx_ospf.OnyxOspfModule,
"_get_ospf_interfaces_config")
self.get_interfaces_config = self.mock_get_interfaces_config.start()
self.mock_load_config = patch(
'ansible_collections.community.general.plugins.module_utils.network.onyx.onyx.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
super(TestOnyxOspfModule, self).tearDown()
self.mock_get_config.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None, transport='cli'):
if self._ospf_exists:
config_file = 'onyx_ospf_show.cfg'
self.get_config.return_value = load_fixture(config_file)
config_file = 'onyx_ospf_interfaces_show.cfg'
self.get_interfaces_config.return_value = load_fixture(config_file)
else:
self.get_config.return_value = None
self.get_interfaces_config.return_value = None
self.load_config.return_value = None
def test_ospf_absent_no_change(self):
set_module_args(dict(ospf=3, state='absent'))
self.execute_module(changed=False)
def test_ospf_present_no_change(self):
interface = dict(name='Loopback 1', area='0.0.0.0')
set_module_args(dict(ospf=2, router_id='10.2.3.4',
interfaces=[interface]))
self.execute_module(changed=False)
def test_ospf_present_remove(self):
set_module_args(dict(ospf=2, state='absent'))
commands = ['no router ospf 2']
self.execute_module(changed=True, commands=commands)
def test_ospf_change_router(self):
interface = dict(name='Loopback 1', area='0.0.0.0')
set_module_args(dict(ospf=2, router_id='10.2.3.5',
interfaces=[interface]))
commands = ['router ospf 2', 'router-id 10.2.3.5', 'exit']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ospf_remove_router(self):
interface = dict(name='Loopback 1', area='0.0.0.0')
set_module_args(dict(ospf=2, interfaces=[interface]))
commands = ['router ospf 2', 'no router-id', 'exit']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ospf_add_interface(self):
interfaces = [dict(name='Loopback 1', area='0.0.0.0'),
dict(name='Loopback 2', area='0.0.0.0')]
set_module_args(dict(ospf=2, router_id='10.2.3.4',
interfaces=interfaces))
commands = ['interface loopback 2 ip ospf area 0.0.0.0']
self.execute_module(changed=True, commands=commands)
def test_ospf_remove_interface(self):
set_module_args(dict(ospf=2, router_id='10.2.3.4'))
commands = ['interface loopback 1 no ip ospf area']
self.execute_module(changed=True, commands=commands)
def test_ospf_add(self):
self._ospf_exists = False
interfaces = [dict(name='Loopback 1', area='0.0.0.0'),
dict(name='Vlan 210', area='0.0.0.0'),
dict(name='Eth1/1', area='0.0.0.0'),
dict(name='Po1', area='0.0.0.0')]
set_module_args(dict(ospf=2, router_id='10.2.3.4',
interfaces=interfaces))
commands = ['router ospf 2', 'router-id 10.2.3.4', 'exit',
'interface loopback 1 ip ospf area 0.0.0.0',
'interface vlan 210 ip ospf area 0.0.0.0',
'interface ethernet 1/1 ip ospf area 0.0.0.0',
'interface port-channel 1 ip ospf area 0.0.0.0']
self.execute_module(changed=True, commands=commands)
| [
"[email protected]"
]
| |
ce99b43702930a3bb2d59fddf1e3c9cbc98cc622 | 9f58ec75177221f1e483c1c1cc9166a6229851d1 | /unsorted_solutions/question47.py | 3fa98b758e9854dfd55c4b94d0e28ef84191563b | []
| no_license | neequole/my-python-programming-exercises | 142b520dcfd78e5c92cf01a5cefecdfdb2939ccd | d6806e02cea9952e782e6921b903b1bb414364ee | refs/heads/master | 2021-08-23T04:48:06.211251 | 2017-12-03T11:40:14 | 2017-12-03T11:40:14 | 103,946,124 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 298 | py | """ Question 47:
Write a program which can filter() to make a list whose elements are even number
between 1 and 20 (both included).
Hints:
Use filter() to filter elements of a list.
Use lambda to define anonymous functions.
"""
out = list(filter(lambda x: x % 2 == 0, range(1, 21)))
print(out)
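# Equivalent list comprehension, shown for comparison (a sketch):
evens = [x for x in range(1, 21) if x % 2 == 0]
assert evens == out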
| [
"[email protected]"
]
| |
873d7960f8851a82a605915be61b0e9cb964e7fc | a3b2c7069c9fab8632b0568db5ab79aceacf9c9c | /devel/lib/python2.7/dist-packages/rqt_bag_plugins/__init__.py | c2f70e437d66d6ccfc8247c2b37ce4e8cdbfa026 | []
| no_license | tbake0155/bluedragon_workspace | 08ed85d9de29c178704bd3f883acafae473b175e | 384d863e00689cf40cde4933447210bbb1ba8636 | refs/heads/master | 2021-05-12T01:35:45.896266 | 2018-01-15T14:59:39 | 2018-01-15T14:59:39 | 117,558,143 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,044 | py | # -*- coding: utf-8 -*-
# generated from catkin/cmake/template/__init__.py.in
# keep symbol table as clean as possible by deleting all unnecessary symbols
from os import path as os_path
from sys import path as sys_path
from pkgutil import extend_path
__extended_path = "/home/tim/catkin_ws/src/rqt_common_plugins/rqt_bag_plugins/src".split(";")
for p in reversed(__extended_path):
sys_path.insert(0, p)
del p
del sys_path
__path__ = extend_path(__path__, __name__)
del extend_path
__execfiles = []
for p in __extended_path:
src_init_file = os_path.join(p, __name__ + '.py')
if os_path.isfile(src_init_file):
__execfiles.append(src_init_file)
else:
src_init_file = os_path.join(p, __name__, '__init__.py')
if os_path.isfile(src_init_file):
__execfiles.append(src_init_file)
del src_init_file
del p
del os_path
del __extended_path
for __execfile in __execfiles:
with open(__execfile, 'r') as __fh:
exec(__fh.read())
del __fh
del __execfile
del __execfiles
| [
"[email protected]"
]
| |
d1113c24a628593d5362f4391c033deff3884613 | 2bc194ed3c23d986724928cc5258cdebaa3fa1c6 | /handlers/domain.py | 1d13df00fd3515d75d5832e9aed99271ae339e9d | []
| no_license | mrcheng0910/whoismanage | 057a57637ef4f51fc9d55b181213c2bace9bfe02 | ebd337760a791367bd5b390ad3a86b6247d1251a | refs/heads/master | 2021-01-10T06:14:48.511919 | 2016-01-13T14:35:54 | 2016-01-13T14:35:54 | 46,243,616 | 8 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,302 | py | # encoding:utf-8
"""
功能:域名数量统计所需数据
"""
import decimal
import tornado
import json
from models.domain_db import DomainDb
PATH = './domain/' # template directory
class DomainIndexHandler(tornado.web.RequestHandler):
"""各个顶级后缀域名数量统计"""
def get(self):
domains, total = DomainDb().fetch_domain(11)
self.render(
PATH + 'domain_overall.html',
domains=json.dumps(domains),
total=total
)
class DomainTldIndexHandler(tornado.web.RequestHandler):
"""
获取指定顶级域名后缀首页
"""
def get(self):
self.render(PATH+'domain_tld.html')
class DomainTldNumHandler(tornado.web.RequestHandler):
"""
获取指定顶级域名后缀的域名数量
"""
def get(self):
tld = self.get_argument('tld','None')
# total,tld_num,whois_tld,whois_total = DomainDb().get_tld_num(tld)
results = DomainDb().get_tld_num(tld)
self.write(json.dumps(results, cls=DecimalEncoder))
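# DecimalEncoder (defined below) lets json.dumps serialize the Decimal
# values that come back from the database. Illustrative round trip (a
# sketch; the value is made up):
#
#   json.dumps({'count': decimal.Decimal('12.5')}, cls=DecimalEncoder)
#   # -> '{"count": 12.5}'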
class DecimalEncoder(json.JSONEncoder):
"""
解决json.dumps不能格式化Decimal问题
"""
def default(self, o):
if isinstance(o, decimal.Decimal):
return float(o)
return super(DecimalEncoder, self).default(o)
| [
"[email protected]"
]
| |
801b906f1c687e553348f1ebf8c65d7708ca6de7 | c0950683d84a3c5999a28ac32668e8dbd159e036 | /dbrec3d/bof/compute_object_level.py | e0104252dcfc92e3232e2cdb182af15e18fd1a82 | [
"BSD-3-Clause"
]
| permissive | mirestrepo/voxels-at-lems | 0a88751680daa3c48f44f49bb4ef0f855a90fa18 | df47d031653d2ad877a97b3c1ea574b924b7d4c2 | refs/heads/master | 2021-01-17T07:26:12.665247 | 2016-07-20T17:49:26 | 2016-07-20T17:49:26 | 3,919,012 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,497 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 22, 2011
@author:Isabel Restrepo
Save the level of the smallest cell entirely containing the object
"""
import os;
import dbrec3d_batch
import time
import optparse
import sys
import glob
class dbvalue:
def __init__(self, index, type):
self.id = index # unsigned integer
self.type = type # string
#*******************The Main Algorithm ************************#
if __name__=="__main__":
dbrec3d_batch.register_processes();
dbrec3d_batch.register_datatypes();
#Parse inputs
print ("******************************Compute Object level***************************")
parser = optparse.OptionParser(description='Init Category info');
parser.add_option('--bof_dir', action="store", dest="bof_dir");
options, args = parser.parse_args();
bof_dir = options.bof_dir;
if not os.path.isdir(bof_dir +"/"):
print "Invalid bof Dir"
sys.exit(-1);
#load category info
dbrec3d_batch.init_process("bofLoadCategoryInfoProces");
dbrec3d_batch.set_input_string(0, bof_dir);
dbrec3d_batch.set_input_string(1, "bof_info_train.xml")
dbrec3d_batch.set_input_string(2, "bof_category_info_old.xml")
dbrec3d_batch.run_process();
(id, type) = dbrec3d_batch.commit_output(0);
categories= dbvalue(id, type);
# compute the object level
dbrec3d_batch.init_process("bof_object_level_process");
dbrec3d_batch.set_input_from_db(0, categories);
dbrec3d_batch.run_process();
| [
"[email protected]"
]
| |
784bcb8b10887b7a5bfaf0455c7e559533e0db6b | d324b3d4ce953574c5945cda64e179f33c36c71b | /php/php-sky/grpc/src/python/grpcio_tests/tests/unit/_grpc_shutdown_test.py | 35823c7451f2b5155c92d63cfc80c22d11773841 | [
"Apache-2.0"
]
| permissive | Denticle/docker-base | decc36cc8eb01be1157d0c0417958c2c80ac0d2f | 232115202594f4ea334d512dffb03f34451eb147 | refs/heads/main | 2023-04-21T10:08:29.582031 | 2021-05-13T07:27:52 | 2021-05-13T07:27:52 | 320,431,033 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,855 | py | # Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the gRPC Core shutdown path."""
import time
import threading
import unittest
import datetime
import grpc
_TIMEOUT_FOR_SEGFAULT = datetime.timedelta(seconds=10)
class GrpcShutdownTest(unittest.TestCase):
def test_channel_close_with_connectivity_watcher(self):
"""Originated by https://github.com/grpc/grpc/issues/20299.
The grpc_shutdown happens synchronously, but there might be Core object
references left in Cython which might lead to ABORT or SIGSEGV.
"""
connection_failed = threading.Event()
def on_state_change(state):
if state in (grpc.ChannelConnectivity.TRANSIENT_FAILURE,
grpc.ChannelConnectivity.SHUTDOWN):
connection_failed.set()
# Connect to a void address and subscribe to state changes
channel = grpc.insecure_channel("0.1.1.1:12345")
channel.subscribe(on_state_change, True)
deadline = datetime.datetime.now() + _TIMEOUT_FOR_SEGFAULT
while datetime.datetime.now() < deadline:
time.sleep(0.1)
if connection_failed.is_set():
channel.close()
if __name__ == '__main__':
unittest.main(verbosity=2)
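# Illustrative state subscription outside the test (a sketch; the address
# is a placeholder):
#
#   channel = grpc.insecure_channel("localhost:50051")
#   channel.subscribe(lambda state: print(state), try_to_connect=True)
#   channel.close()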
| [
"[email protected]"
]
| |
26a25d8783f6db156297a1f4e99b7395d3be9e3e | 8dc84558f0058d90dfc4955e905dab1b22d12c08 | /third_party/blink/PRESUBMIT_test.py | 9f1664cef824a1591966ffb4ecf2c8477f9683a3 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0"
]
| permissive | meniossin/src | 42a95cc6c4a9c71d43d62bc4311224ca1fd61e03 | 44f73f7e76119e5ab415d4593ac66485e65d700a | refs/heads/master | 2022-12-16T20:17:03.747113 | 2020-09-03T10:43:12 | 2020-09-03T10:43:12 | 263,710,168 | 1 | 0 | BSD-3-Clause | 2020-05-13T18:20:09 | 2020-05-13T18:20:08 | null | UTF-8 | Python | false | false | 8,720 | py | #!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Note: running this test requires installing the package python-mock.
# pylint: disable=C0103
# pylint: disable=F0401
import PRESUBMIT
import os.path
import subprocess
import sys
import unittest
sys.path.append(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'pymock'))
sys.path.append(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
import mock
from PRESUBMIT_test_mocks import MockInputApi
from PRESUBMIT_test_mocks import MockOutputApi
from PRESUBMIT_test_mocks import MockAffectedFile
class Capture(object):
"""Class to capture a call argument that can be tested later on."""
def __init__(self):
self.value = None
def __eq__(self, other):
self.value = other
return True
class PresubmitTest(unittest.TestCase):
@mock.patch('subprocess.Popen')
def testCheckChangeOnUploadWithBlinkAndChromiumFiles(self, _):
"""This verifies that CheckChangeOnUpload will only call check_blink_style.py
on non-test files.
"""
diff_file_blink_h = ['some diff']
diff_file_chromium_h = ['another diff']
diff_file_test_expectations = ['more diff']
mock_input_api = MockInputApi()
mock_input_api.files = [
MockAffectedFile('file_blink.h', diff_file_blink_h),
MockAffectedFile('file_chromium.h', diff_file_chromium_h),
MockAffectedFile('web_tests/TestExpectations', diff_file_test_expectations)
]
# Access to a protected member _CheckStyle
# pylint: disable=W0212
PRESUBMIT._CheckStyle(mock_input_api, MockOutputApi())
capture = Capture()
# pylint: disable=E1101
subprocess.Popen.assert_called_with(capture, stderr=-1)
self.assertEqual(6, len(capture.value))
self.assertEqual('../../file_blink.h', capture.value[3])
self.assertEqual('../../web_tests/TestExpectations', capture.value[5])
@mock.patch('subprocess.Popen')
def testCheckChangeOnUploadWithEmptyAffectedFileList(self, _):
"""This verifies that CheckChangeOnUpload will skip calling
check_blink_style.py if the affected file list is empty.
"""
diff_file_chromium1_h = ['some diff']
diff_file_chromium2_h = ['another diff']
diff_file_layout_test_html = ['more diff']
mock_input_api = MockInputApi()
mock_input_api.files = [
MockAffectedFile('first_file_chromium.h', diff_file_chromium1_h),
MockAffectedFile('second_file_chromium.h', diff_file_chromium2_h),
MockAffectedFile('LayoutTests/some_tests.html', diff_file_layout_test_html)
]
# Access to a protected member _CheckStyle
# pylint: disable=W0212
PRESUBMIT._CheckStyle(mock_input_api, MockOutputApi())
# pylint: disable=E1101
subprocess.Popen.assert_not_called()
def testCheckPublicHeaderWithBlinkMojo(self):
"""This verifies that _CheckForWrongMojomIncludes detects -blink mojo
headers in public files.
"""
mock_input_api = MockInputApi()
potentially_bad_content = '#include "public/platform/modules/cache_storage.mojom-blink.h"'
mock_input_api.files = [
MockAffectedFile('third_party/blink/public/a_header.h',
[potentially_bad_content], None)
]
# Access to a protected member _CheckForWrongMojomIncludes
# pylint: disable=W0212
errors = PRESUBMIT._CheckForWrongMojomIncludes(mock_input_api,
MockOutputApi())
self.assertEquals(
'Public blink headers using Blink variant mojoms found. ' +
'You must include .mojom-shared.h instead:',
errors[0].message)
def testCheckInternalHeaderWithBlinkMojo(self):
"""This verifies that _CheckForWrongMojomIncludes accepts -blink mojo
headers in blink internal files.
"""
mock_input_api = MockInputApi()
potentially_bad_content = '#include "public/platform/modules/cache_storage.mojom-blink.h"'
mock_input_api.files = [
MockAffectedFile('third_party/blink/renderer/core/a_header.h',
[potentially_bad_content], None)
]
# Access to a protected member _CheckForWrongMojomIncludes
# pylint: disable=W0212
errors = PRESUBMIT._CheckForWrongMojomIncludes(mock_input_api,
MockOutputApi())
self.assertEquals([], errors)
class CxxDependencyTest(unittest.TestCase):
allow_list = [
'gfx::ColorSpace',
'gfx::CubicBezier',
'gfx::ICCProfile',
'gfx::ScrollOffset',
'scoped_refptr<base::SingleThreadTaskRunner>',
]
disallow_list = [
'GURL',
'base::Callback<void()>',
'content::RenderFrame',
'gfx::Point',
'gfx::Rect',
'net::IPEndPoint',
'ui::Clipboard',
]
disallow_message = [
]
def runCheck(self, filename, file_contents):
mock_input_api = MockInputApi()
mock_input_api.files = [
MockAffectedFile(filename, file_contents),
]
# Access to a protected member
# pylint: disable=W0212
return PRESUBMIT._CheckForForbiddenChromiumCode(mock_input_api, MockOutputApi())
# References in comments should never be checked.
def testCheckCommentsIgnored(self):
filename = 'third_party/blink/renderer/core/frame/frame.cc'
for item in self.allow_list:
errors = self.runCheck(filename, ['// %s' % item])
self.assertEqual([], errors)
for item in self.disallow_list:
errors = self.runCheck(filename, ['// %s' % item])
self.assertEqual([], errors)
# References in Test files should never be checked.
def testCheckTestsIgnored(self):
        filename = 'third_party/blink/renderer/core/frame/frame_test.cc'
for item in self.allow_list:
errors = self.runCheck(filename, ['// %s' % item])
self.assertEqual([], errors)
for item in self.disallow_list:
errors = self.runCheck(filename, ['// %s' % item])
self.assertEqual([], errors)
# core, modules, public, et cetera should all have dependency enforcement.
def testCheckCoreEnforcement(self):
filename = 'third_party/blink/renderer/core/frame/frame.cc'
for item in self.allow_list:
errors = self.runCheck(filename, ['%s' % item])
self.assertEqual([], errors)
for item in self.disallow_list:
errors = self.runCheck(filename, ['%s' % item])
self.assertEquals(1, len(errors))
self.assertRegexpMatches(
errors[0].message,
r'^[^:]+:\d+ uses disallowed identifier .+$')
def testCheckModulesEnforcement(self):
filename = 'third_party/blink/renderer/modules/modules_initializer.cc'
for item in self.allow_list:
errors = self.runCheck(filename, ['%s' % item])
self.assertEqual([], errors)
for item in self.disallow_list:
errors = self.runCheck(filename, ['%s' % item])
self.assertEquals(1, len(errors))
self.assertRegexpMatches(
errors[0].message,
r'^[^:]+:\d+ uses disallowed identifier .+$')
def testCheckPublicEnforcement(self):
filename = 'third_party/blink/renderer/public/platform/web_thread.h'
for item in self.allow_list:
errors = self.runCheck(filename, ['%s' % item])
self.assertEqual([], errors)
for item in self.disallow_list:
errors = self.runCheck(filename, ['%s' % item])
self.assertEquals(1, len(errors))
self.assertRegexpMatches(
errors[0].message,
r'^[^:]+:\d+ uses disallowed identifier .+$')
# platform and controller should be opted out of enforcement, but aren't
# currently checked because the PRESUBMIT test mocks are missing too
# much functionality...
# External module checks should not affect CSS files.
def testCheckCSSIgnored(self):
filename = 'third_party/blink/renderer/devtools/front_end/timeline/someFile.css'
errors = self.runCheck(filename, ['.toolbar::after { color: pink; }\n'])
self.assertEqual([], errors)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
a4a75d6f57c7d5507c1728eafd50f371d56dda12 | a4a5c6f185ed38ea4b93e49408f369f2ae7073e9 | /aliyun-python-sdk-iot/aliyunsdkiot/request/v20170420/PubBroadcastRequest.py | 2f067cac3538c6f3deb82417c2224f42bb8c9a2a | [
"Apache-2.0"
]
| permissive | samuelchen/aliyun-openapi-python-sdk | 86ee6eb9573e68cbf98ea61328818bfca005f25f | 52dda2326c34633858e4ed83a526dadce90dd5ef | refs/heads/master | 2020-03-07T03:50:18.248590 | 2018-04-02T13:48:10 | 2018-04-02T13:48:10 | 127,248,156 | 1 | 0 | null | 2018-03-29T06:39:31 | 2018-03-29T06:39:30 | null | UTF-8 | Python | false | false | 1,520 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class PubBroadcastRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Iot', '2017-04-20', 'PubBroadcast')
def get_TopicFullName(self):
return self.get_query_params().get('TopicFullName')
def set_TopicFullName(self,TopicFullName):
self.add_query_param('TopicFullName',TopicFullName)
def get_MessageContent(self):
return self.get_query_params().get('MessageContent')
def set_MessageContent(self,MessageContent):
self.add_query_param('MessageContent',MessageContent)
def get_ProductKey(self):
return self.get_query_params().get('ProductKey')
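    # Hedged usage sketch (not from this SDK file; the access key, region and
    # parameter values are placeholder assumptions). AcsClient and
    # do_action_with_exception come from aliyunsdkcore:
    #
    # from aliyunsdkcore.client import AcsClient
    # client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-shanghai')
    # request = PubBroadcastRequest()
    # request.set_ProductKey('<product-key>')
    # request.set_TopicFullName('/broadcast/<product-key>/notice')
    # request.set_MessageContent('aGVsbG8=')  # payload is assumed base64-encoded
    # response = client.do_action_with_exception(request)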
def set_ProductKey(self,ProductKey):
self.add_query_param('ProductKey',ProductKey) | [
"[email protected]"
]
| |
38221f887d5dd3d306c517976168c1ae095db6f3 | 978184a03ecf7b0fe60fe824606877e5ad340c25 | /G/exo_7.16.py | 77937247652c008c8d4fdeb8ad9c281afe6e6e2d | []
| no_license | zinsmatt/Kalman_Filter_MOOC | 9e88a84818c09e2d01ea102855b7334bc2d0800a | 01d3ae3a213e94f480338f0a10bea5663185f167 | refs/heads/master | 2023-01-27T12:22:54.117402 | 2020-12-09T10:18:28 | 2020-12-09T10:18:28 | 304,326,290 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,742 | py | from roblib import * # available at https://www.ensta-bretagne.fr/jaulin/roblib.py
def draw_invpend(ax,x, w): #inverted pendulum
s,θ=x[0,0],x[1,0]
draw_box(ax,s-0.7,s+0.7,-0.25,0,'blue')
plot( [s,s-sin(θ)],[0,cos(θ)],'magenta', linewidth = 2)
plt.plot(w, 0, "or")
mc,l,g,mr = 5,1,9.81,1
dt = 0.04
x = array([[0,0.4,0,0]]).T
Γα = (sqrt(dt)*(10**-3))**2*eye(4)
def f(x,u):
s,θ,ds,dθ=x[0,0],x[1,0],x[2,0],x[3,0]
dds=(mr*sin(θ)*(g*cos(θ)- l*dθ**2) + u)/(mc+mr*sin(θ)**2)
ddθ= (sin(θ)*((mr+mc)*g - mr*l*dθ**2*cos(θ)) + cos(θ)*u)/ (l*(mc+mr*sin(θ)**2))
return array([[ds],[dθ],[dds],[ddθ]])
ax=init_figure(-3,3,-3,3)
A = np.array([[0.0, 0.0, 1.0, 0.0],
[0.0, 0.0, 0.0, 1.0],
[0.0, mr*g/mc, 0.0, 0.0],
[0.0, (mc+mr) * g / (l*mc), 0.0, 0.0]])
B = np.array([[0.0, 0.0, 1/mc, 1/(l*mc)]]).T
K = place_poles(A, B, [-2.0, -2.1, -2.2, -2.3]).gain_matrix
E = array([[1, 0, 0, 0]])
h = -inv(E @ inv(A - B @ K) @ B)
C = np.array([[1.0, 0.0, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0]])
# L = place_poles(A.T, C.T, [-2.0, -2.1, -2.2, -2.3]).gain_matrix.T
xhat = np.zeros((4, 1))
Gx = eye(4)
Galpha = eye(4) * dt * 0.0001
Gbeta = 0.0001 * eye(2)
w = 2
for ti, t in enumerate(arange(0,10,dt)):
clear(ax)
draw_invpend(ax,x, w)
u = (-K @ xhat + h * w).item()
y = C @ x + 0.01 * randn(2, 1)
    # Luenberger observer (kept for reference)
    # xhat = xhat + (A @ xhat + B * u - L @ (C @ xhat - y)) * dt
    # Kalman filter estimator
    xhat, Gx = kalman(xhat, Gx, dt * B * u, y, Galpha, Gbeta, eye(4) + dt * A, C)
α=mvnrnd1(Γα)
x = x + dt*f(x,u)+α
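# Hedged sketch (assumption) of roblib's kalman() used above -- the standard
# predict/correct equations, with arguments matching the call order
# kalman(xhat, Gx, u, y, Galpha, Gbeta, A, C):
#   prediction:  xhat = A @ xhat + u ;            Gx = A @ Gx @ A.T + Galpha
#   correction:  K = Gx @ C.T @ inv(C @ Gx @ C.T + Gbeta)
#                xhat = xhat + K @ (y - C @ xhat) ; Gx = (eye(len(xhat)) - K @ C) @ Gx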
| [
"[email protected]"
]
| |
e673edd150a7a3f67dc655407b709b1e88bb1511 | 205096eef765375188a0e8fc2f634cc9595e3072 | /aged_partner_report/wizard/aged_partner_wizard.py | f0d73393a78834c25e4942d7fd61649ec488c8d3 | []
| no_license | xAlphaOmega/MCLNEW | 1ded905978df99be1867a142662cafda6318a993 | a2b9708ebd22e7f351e1fa9c40bfbc29532242b8 | refs/heads/master | 2021-01-26T07:09:25.643962 | 2019-12-18T14:03:04 | 2019-12-18T14:03:04 | 243,359,262 | 0 | 3 | null | 2020-02-26T20:30:23 | 2020-02-26T20:30:22 | null | UTF-8 | Python | false | false | 9,503 | py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from datetime import date, timedelta
import datetime
from odoo.exceptions import UserError
import pdb
class AgedPartnerReport(models.TransientModel):
_name = "aged.partner.wizard"
_description = "Aged Partner Report"
report_date = fields.Date('As Of')
@api.multi
def partner_count(self):
invoice_obj= self.env['account.invoice']
payment_obj= self.env['account.payment']
if self.env['aged.partner.report'].search([]):
for each in self.env['aged.partner.report'].search([]):
each.unlink()
payment_ids = payment_obj.search([('partner_type','=','customer'),('state','=','posted')])
for payment in payment_ids:
account_move_line_id = self.env['account.move.line'].search([('payment_id','=',payment.id),('amount_residual','<',0)])
date_30 = datetime.datetime.strptime((self.report_date-timedelta(days=30)).isoformat(), '%Y-%m-%d').date()
date_60 = datetime.datetime.strptime((self.report_date-timedelta(days=60)).isoformat(), '%Y-%m-%d').date()
date_90 = datetime.datetime.strptime((self.report_date-timedelta(days=90)).isoformat(), '%Y-%m-%d').date()
date_120 = datetime.datetime.strptime((self.report_date-timedelta(days=120)).isoformat(), '%Y-%m-%d').date()
date_180 = datetime.datetime.strptime((self.report_date-timedelta(days=180)).isoformat(), '%Y-%m-%d').date()
print(payment,account_move_line_id)
# pdb.set_trace()
# if len(account_move_line_id) >=1:
for each in account_move_line_id:
if each.id:
sub_tot_no_due = each.amount_residual if each.date >= self.report_date else 0.0
sub_tot_30 = each.amount_residual if each.date >= date_30 and each.date < self.report_date else 0.0
sub_tot_60 = each.amount_residual if each.date >= date_60 and each.date < date_30 else 0.0
sub_tot_90 = each.amount_residual if each.date >= date_90 and each.date < date_60 else 0.0
sub_tot_120 = each.amount_residual if each.date >= date_120 and each.date < date_90 else 0.0
sub_tot_180 = each.amount_residual if each.date >= date_180 and each.date < date_120 else 0.0
sub_tot_max = each.amount_residual if each.date <date_180 else 0.0
total_amount = sub_tot_no_due+sub_tot_30+sub_tot_60+sub_tot_90+sub_tot_120+sub_tot_180+sub_tot_max
new_pay_id = self.env['aged.partner.report'].create({
'partner_id':payment.partner_id.id,
'ref':payment.name,
'due_date':payment.payment_date,
'sub_tot_no_due': sub_tot_no_due,
'sub_tot_30': sub_tot_30,
'sub_tot_60': sub_tot_60,
'sub_tot_90': sub_tot_90,
'sub_tot_120': sub_tot_120,
'sub_tot_180': sub_tot_180,
'older_amount': sub_tot_max,
'total_amount': total_amount,
})
for invoice in invoice_obj.search([('type','=', 'out_invoice'),('residual','>',0)]):
date_30 = datetime.datetime.strptime((self.report_date-timedelta(days=30)).isoformat(), '%Y-%m-%d').date()
date_60 = datetime.datetime.strptime((self.report_date-timedelta(days=60)).isoformat(), '%Y-%m-%d').date()
date_90 = datetime.datetime.strptime((self.report_date-timedelta(days=90)).isoformat(), '%Y-%m-%d').date()
date_120 = datetime.datetime.strptime((self.report_date-timedelta(days=120)).isoformat(), '%Y-%m-%d').date()
date_180 = datetime.datetime.strptime((self.report_date-timedelta(days=180)).isoformat(), '%Y-%m-%d').date()
            due_30_amount = due_60_amount = due_90_amount = due_120_amount = due_180_amount = due_max_amount = no_due_amount = sub_tot_max = 0
# if invoice.id == 4:
# pdb.set_trace()
sub_tot_no_due = invoice.residual if invoice.date_due >= self.report_date else 0.0
sub_tot_30 = invoice.residual if invoice.date_due >= date_30 and invoice.date_due < self.report_date else 0.0
sub_tot_60 = invoice.residual if invoice.date_due >= date_60 and invoice.date_due < date_30 else 0.0
sub_tot_90 = invoice.residual if invoice.date_due >= date_90 and invoice.date_due < date_60 else 0.0
sub_tot_120 = invoice.residual if invoice.date_due >= date_120 and invoice.date_due < date_90 else 0.0
sub_tot_180 = invoice.residual if invoice.date_due >= date_180 and invoice.date_due < date_120 else 0.0
sub_tot_max = invoice.residual if invoice.date_due <date_180 else 0.0
total_amount = sub_tot_no_due+sub_tot_30+sub_tot_60+sub_tot_90+sub_tot_120+sub_tot_180+sub_tot_max
            # due_30_amount = invoice.residual if invoice.date_due >= date_30 and invoice.date_due < self.report_date else 0.0
# sub_tot_no_due = invoice.residual if invoice.date_due >= self.report_date else 0.0
# no_due_invoice_ids = invoice_obj.search([('id','=', invoice.id),('date_due','>=',self.report_date)])
# no_due_amount =0.0
# if no_due_invoice_ids:
# for each_no in no_due_invoice_ids:
# no_due_amount += each_no.residual
# if not no_due_invoice_ids:
# due_30_invoice_ids = invoice_obj.search([('id','=', invoice.id),('date_due','>=',date_30)])
# due_60_invoice_ids = invoice_obj.search([('id','=', invoice.id),('date_due','>',date_30),('date_due','<=',date_60),])
# due_90_invoice_ids = invoice_obj.search([('id','=', invoice.id),('date_due','>',date_60),('date_due','<=',date_90),])
# due_120_invoice_ids =invoice_obj.search([('id','=', invoice.id),('date_due','>',date_90),('date_due','<=',date_120),])
# due_180_invoice_ids =invoice_obj.search([('id','=', invoice.id),('date_due','>',date_120),('date_due','<=',date_180),])
# due_max_invoice_ids =invoice_obj.search([('id','=', invoice.id),('date_due','>',date_180)])
# # due_30_invoice_ids = invoice_obj.search([('date_due','=',self.report_date),('type','=', 'out_invoice')])
# # due_invoice_ids= due_30_invoice_ids+daysue_60_invoice_ids+due_90_invoice_ids+due_120_invoice_ids+due_120_invoice_ids+due_180_invoice_ids
# if due_30_invoice_ids:
# for each_30 in due_30_invoice_ids:
# due_30_amount += each_30.residual
# if due_60_invoice_ids:
# for each_60 in due_60_invoice_ids:
# due_60_amount += each_60.residual
# if due_90_invoice_ids:
# for each_90 in due_90_invoice_ids:
# due_90_amount += each_90.residual
# if due_120_invoice_ids:
# for each_120 in due_120_invoice_ids:
# due_120_amount += each_120.residual
# if due_180_invoice_ids:
# for each_180 in due_180_invoice_ids:
# due_180_amount += each_180.residual
# if due_max_invoice_ids:
# for each_max in due_max_invoice_ids:
# due_max_amount += each_max.residual
new_id = self.env['aged.partner.report'].create({
'partner_id':invoice.partner_id.id,
'ref':invoice.number,
'sales_persion_id':invoice.user_id.id,
'sales_team_id':invoice.team_id.id,
'due_date':invoice.date_due,
'sub_tot_no_due': sub_tot_no_due,
'sub_tot_30': sub_tot_30,
'sub_tot_60': sub_tot_60,
'sub_tot_90': sub_tot_90,
'sub_tot_120': sub_tot_120,
'sub_tot_180': sub_tot_180,
'older_amount': sub_tot_max,
'total_amount': total_amount,
})
tree_view_id = self.env.ref('aged_partner_report.view_aged_partner_tree').id
# from_view_id = self.env.ref('production_allocation_report.view_assigned_employee_form').id
return {
'name': 'Aged Partner',
# 'view_type': 'form',
'view_mode': 'tree',
'views': [(tree_view_id, 'tree')],
'res_model': 'aged.partner.report',
'type': 'ir.actions.act_window',
'target': 'current',
'context': dict(self.env.context)
}
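    # Worked example of the aging buckets above (values are illustrative):
    # with report_date = 2018-01-31, date_30 = 2018-01-01 and date_60 = 2017-12-02,
    # an invoice due 2017-12-20 satisfies `due >= date_60 and due < date_30`,
    # so only sub_tot_60 (the 31-60 day bucket) is non-zero for that line.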
| [
"[email protected]"
]
| |
bf2556e4a09d1f8d8208b65c9bcf88234a143a89 | d529b72eb4610ddf0e0b8170354a21f87dbf0e42 | /Unit19/involved.py | 834b8689f1666e416494769723984c5bb04d1bc3 | []
| no_license | SaretMagnoslove/Udacity-CS101 | 57a8b6609e2f2a09135ea0189a782d66e225b641 | 2e573a362a4d8d688199777937c6aaff59f6b900 | refs/heads/master | 2021-04-15T17:43:05.522259 | 2018-06-20T23:33:26 | 2018-06-20T23:33:26 | 126,618,278 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,594 | py | # Dictionaries of Dictionaries (of Dictionaries)
# The next several questions concern the data structure below for keeping
# track of Udacity's courses (where all of the values are strings):
# { <hexamester>, { <class>: { <property>: <value>, ... },
# ... },
# ... }
# For example,
courses = {
'feb2012': { 'cs101': {'name': 'Building a Search Engine',
'teacher': 'Dave',
'assistant': 'Peter C.'},
'cs373': {'name': 'Programming a Robotic Car',
'teacher': 'Sebastian',
'assistant': 'Andy'}},
'apr2012': { 'cs101': {'name': 'Building a Search Engine',
'teacher': 'Dave',
'assistant': 'Sarah'},
'cs212': {'name': 'The Design of Computer Programs',
'teacher': 'Peter N.',
'assistant': 'Andy',
'prereq': 'cs101'},
'cs253':
{'name': 'Web Application Engineering - Building a Blog',
'teacher': 'Steve',
'prereq': 'cs101'},
'cs262':
{'name': 'Programming Languages - Building a Web Browser',
'teacher': 'Wes',
'assistant': 'Peter C.',
'prereq': 'cs101'},
'cs373': {'name': 'Programming a Robotic Car',
'teacher': 'Sebastian'},
'cs387': {'name': 'Applied Cryptography',
'teacher': 'Dave'}},
'jan2044': { 'cs001': {'name': 'Building a Quantum Holodeck',
'teacher': 'Dorina'},
'cs003': {'name': 'Programming a Robotic Robotics Teacher',
'teacher': 'Jasper'},
}
}
# For the following questions, you will find the
# for <key> in <dictionary>:
# <block>
# construct useful. This loops through the key values in the Dictionary. For
# example, this procedure returns a list of all the courses offered in the given
# hexamester:
def courses_offered(courses, hexamester):
res = []
for c in courses[hexamester]:
res.append(c)
return res
# [Double Gold Star] Define a procedure, involved(courses, person), that takes
# as input a courses structure and a person and returns a Dictionary that
# describes all the courses the person is involved in. A person is involved
# in a course if they are a value for any property for the course. The output
# Dictionary should have hexamesters as its keys, and each value should be a
# list of courses that are offered that hexamester (the courses in the list
# can be in any order).
def involved(courses, person):
d = {}
for hexa in courses.keys():
for course in courses[hexa].keys():
if person in courses[hexa][course].values():
if hexa in d:
d[hexa].append(course)
else:
d[hexa] = [course]
return d
# For example:
print (involved(courses, 'Dave'))
#>>> {'apr2012': ['cs101', 'cs387'], 'feb2012': ['cs101']}
#print involved(courses, 'Peter C.')
#>>> {'apr2012': ['cs262'], 'feb2012': ['cs101']}
#print involved(courses, 'Dorina')
#>>> {'jan2044': ['cs001']}
#print involved(courses,'Peter')
#>>> {}
#print involved(courses,'Robotic')
#>>> {}
#print involved(courses, '')
#>>> {}
| [
"[email protected]"
]
| |
15f51480656364cc0aedcabaf36127e26ac783fb | 79e630cbbbeca74d8c1fab822d3d854518a7e7ca | /hanmaum/urls.py | 9836efddaaec98334f3c756a1f8199bdd031699c | []
| no_license | Son-GyeongSik/hanalum_web | f573f936068ba24b2215a74efdbb9e8f4b0ff9f8 | 3669b1d3c108c2aa64a7d0f11116adc0f385ce83 | refs/heads/main | 2023-03-24T08:16:24.098445 | 2021-03-14T05:06:46 | 2021-03-14T05:06:46 | 351,105,953 | 1 | 0 | null | 2021-03-24T14:16:35 | 2021-03-24T14:16:35 | null | UTF-8 | Python | false | false | 680 | py | """hanmaum 관련 urls 정의 파일입니다."""
from django.urls import path
from .views import (
edit, index, new, show, introduce, like, dislike, cancle, new_comment
)
app_name = 'hanmaum'
urlpatterns = [
path('', index, name="index"),
path('<int:article_id>', show, name="show"),
path('show/<int:article_id>', show, name="show"),
path('new', new, name="new"),
path('edit', edit, name="edit"),
path('introduce', introduce, name="introduce"),
path('like', like, name="like"),
path('dislike', dislike, name="dislike"),
path('cancle', cancle, name="cancle"),
path('<int:article_id>/comment/new/', new_comment, name="new_comment")
]
| [
"[email protected]"
]
| |
d73707052b010a015388947c1705c99bb8ae15ec | 248c535f3612c646bccadecafdca649fd788bb1f | /.history/app/models_20210927050430.py | 68917ed4d52d586d0562e0b5cd1b52726d578030 | [
"MIT"
]
| permissive | GraceOswal/pitch-perfect | 3b923e4de5fff1a405dcb54374a1ba0522232025 | d781c6e0f55c11f2a5e5dceb952f6b2de3c47c3b | refs/heads/master | 2023-08-16T01:42:18.742154 | 2021-10-01T06:59:11 | 2021-10-01T06:59:11 | 410,224,294 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | from . import db
# Maps the User class to the 'users' table in the pitch-perfect database
class User(db.Model):
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(255))

    def __repr__(self):
        return '<User %r>' % self.username | [
"[email protected]"
]
| |
2ceafa22468a3657f444a3f7565a74038f94664c | 5199d37699c7c104cd9b00ffecad8f70d0f4f203 | /test_mean.py | 0a5dc585e4ac6a50486572913654dcc0e61bd20c | [
"CC-BY-4.0",
"MIT"
]
| permissive | Becksteinlab/workshop_testing | 81b9a85f7a002e6f78666138a9959d51f6949ec1 | e4ee392e6e9bd1f7c429290b8820cfd06a512032 | refs/heads/master | 2020-03-25T12:31:27.376484 | 2018-08-07T00:28:51 | 2018-08-07T00:28:51 | 143,780,267 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 492 | py | import pytest
from mean import mean
def test_ints():
num_list = [1,2,3,4,5]
obs = mean(num_list)
exp = 3
assert obs == exp
def test_zero():
    num_list = [0, 2, 4, 6]
obs = mean(num_list)
exp = 3
assert obs == exp
def test_double():
# This one will fail in Python 2
num_list = [1,2,3,4]
obs = mean(num_list)
exp = 2.5
assert obs == exp
def test_long():
big = 100000000
obs = mean(range(1,big))
exp = big/2.0
assert obs == exp
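# Hedged sketch (assumption) of the mean.py module under test, kept as a
# comment so it does not shadow the imported implementation:
#
# def mean(num_list):
#     return sum(num_list) / len(num_list)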
| [
"[email protected]"
]
| |
d4dd8070546b4c3ce4a0fb08d5906e255dcdbe45 | 5bdf195972deec9378d14d1ba37994c0cae9ad7b | /dash-example/data.py | 1489c30d3b5c1e72c23352d1836d108e590bc256 | [
"BSD-2-Clause"
]
| permissive | SoftwareDefinedBuildings/mortar-analytics | 42b40067b2c6056430b0cd11889993a10b8428a7 | df48efca45ab2636f53c3b7301bcaa21b6c4e91f | refs/heads/master | 2023-06-08T04:02:21.675221 | 2022-06-26T00:35:46 | 2022-06-26T00:35:46 | 168,413,215 | 20 | 10 | BSD-2-Clause | 2023-02-15T21:33:04 | 2019-01-30T20:50:29 | Python | UTF-8 | Python | false | false | 1,841 | py | import pymortar
import os
import pandas as pd
# use default values (environment variables):
# MORTAR_API_ADDRESS: mortardata.org:9001
# MORTAR_API_USERNAME: required
# MORTAR_API_PASSWORD: required
client = pymortar.Client({})
meter_query = "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Building_Electric_Meter };"
# run qualify stage to get list of sites with electric meters
resp = client.qualify([meter_query])
if resp.error != "":
print("ERROR: ", resp.error)
os.exit(1)
print("running on {0} sites".format(len(resp.sites)))
# define the view of meters (metadata)
meters = pymortar.View(
sites=resp.sites,
name="meters",
definition=meter_query,
)
# define the meter timeseries streams we want
meter_data = pymortar.DataFrame(
name="meters",
aggregation=pymortar.MEAN,
window="1h",
timeseries=[
pymortar.Timeseries(
view="meters",
dataVars=["?meter"]
)
]
)
# temporal parameters for the query: 2015-2018 @ 1h mean
time_params = pymortar.TimeParams(
start="2015-01-01T00:00:00Z",
end="2018-01-01T00:00:00Z",
)
# form the full request object
request = pymortar.FetchRequest(
sites=resp.sites,
views=[meters],
dataFrames=[meter_data],
time=time_params
)
# download the data
print("Starting to download data...")
data = client.fetch(request)
# compute min/max/mean of the hourly meter totals for each site
ranges = []
for site in resp.sites:
meter_uuids = data.query("select meter_uuid from meters where site='{0}'".format(site))
meter_uuids = [row[0] for row in meter_uuids]
meterdf = data['meters'][meter_uuids].sum(axis=1)
ranges.append( [site, meterdf.min(), meterdf.max(), meterdf.mean()])
site_summary = pd.DataFrame.from_records(ranges)
site_summary.columns = ['site','min_daily','max_daily','mean_daily']
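# Hedged follow-up (assumption -- the dash app that consumes this summary is
# not shown in this file): persist the per-site summary for the dashboard layer.
# site_summary.to_csv('site_summary.csv', index=False)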
| [
"[email protected]"
]
| |
e7e8676236a60acdb5e6bee1d75bc7710446e73e | bcd711985fe4381f1599b797e6048a27e357f8d1 | /master/action/task_action.py | 938c94dd4ff23cef9563800f6f3c3c0681fe8dd6 | []
| no_license | No-bb-just-do-it/distributed-spider | 5c02847604350f404ca0a1eeea64c0d6e6c7aad8 | e8bf92a742968eb3c7acaede138132cd6ebe18f4 | refs/heads/master | 2020-03-20T22:56:02.742602 | 2018-05-30T03:33:08 | 2018-05-30T03:33:08 | 137,821,517 | 0 | 1 | null | 2018-06-19T00:40:40 | 2018-06-19T00:40:39 | null | UTF-8 | Python | false | false | 1,079 | py | # -*- coding: utf-8 -*-
'''
Created on 2017-12-08 13:52
---------
@summary:
---------
@author: Boris
'''
import sys
sys.path.append('..')
from utils.log import log
import utils.tools as tools
import web
import json
from service.task_service import TaskService
class TaskAction():
def __init__(self):
self.task_service = TaskService()
def deal_request(self, name):
web.header('Content-Type','text/html;charset=UTF-8')
data = json.loads(json.dumps(web.input()))
print(name)
print(data)
if name == 'get_task':
tasks = self.task_service.get_task()
return tools.dumps_json(tasks)
elif name == 'update_task':
tasks = eval(data.get('tasks', []))
status = data.get('status')
self.task_service.update_task_status(tasks, status)
return tools.dumps_json('{"status":1}')
def GET(self, name):
return self.deal_request(name)
def POST(self, name):
return self.deal_request(name)
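# Hedged wiring sketch (assumption): how this handler is typically mounted in
# web.py; the real urls tuple lives in the application entry point, not here.
#
# urls = ('/task/(.*)', 'TaskAction')
# app = web.application(urls, globals())
# if __name__ == '__main__':
#     app.run()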
| [
"[email protected]"
]
| |
4fd550bd7e17bdd66d350b97ce999f08dd31e922 | 7408dd6c91e601133ca6971d84639ce1b4f18622 | /Wikipedia/config.py | 2988e0481aa933b0e9d36b292f7e0b24f9546020 | []
| no_license | theY4Kman/Yakbot-plugins | 72370cff674335e45f18b27418bc5f0cb87f62ca | faac0bd4fb2599c8adf5aab583ce986aafa037c7 | refs/heads/master | 2021-01-01T19:21:03.117097 | 2012-09-17T03:27:57 | 2012-09-17T03:27:57 | 1,045,656 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,600 | py | #!/usr/bin/env python
# =============================================================================
# Wikipedia
# Copyright (C) 2009 Zach "theY4Kman" Kanzler
# =============================================================================
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License, version 3.0, as published by the
# Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
import supybot.conf as conf
import supybot.registry as registry
def configure(advanced):
# This will be called by supybot to configure this module. advanced is
# a bool that specifies whether the user identified himself as an advanced
# user or not. You should effect your configuration by manipulating the
# registry as appropriate.
from supybot.questions import expect, anything, something, yn
conf.registerPlugin('Wikipedia', True)
Wikipedia = conf.registerPlugin('Wikipedia')
# This is where your configuration variables (if any) should go. For example:
# conf.registerGlobalValue(Wikipedia, 'someConfigVariableName',
# registry.Boolean(False, """Help for someConfigVariableName."""))
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| [
"[email protected]"
]
| |
671b01f557ba3809a0c9a20394d172e2fced8c3a | 42e4cc242a2be105dae0288d02a08fbd95bb476a | /deepspeed/ops/sparse_attention/sparse_self_attention.py | 6e7d8905e0a806b09216e3c82414f2737c24e7bc | [
"MIT",
"LicenseRef-scancode-generic-cla"
]
| permissive | afiaka87/DeepSpeed | a49ca48a410190b631a78c392c25359ed4478577 | 83ff549aa365d4fee744074ac0a64f27571ecbc8 | refs/heads/main | 2023-04-14T16:22:37.595209 | 2021-04-12T09:20:06 | 2021-04-12T09:20:06 | 356,466,212 | 2 | 0 | MIT | 2021-04-12T09:20:07 | 2021-04-10T04:09:31 | Python | UTF-8 | Python | false | false | 6,794 | py | """
Copyright 2020 The Microsoft DeepSpeed Team
"""
import torch.nn as nn
from torch.nn.functional import *
import torch
from torch import distributed as dist
from collections import namedtuple
from deepspeed.ops.sparse_attention import MatMul, Softmax, SparsityConfig
import sys
class SparseSelfAttention(nn.Module):
"""Implements an efficient Sparse Self Attention of Transformer layer based on `Generative Modeling with Sparse Transformers`: https://arxiv.org/abs/1904.10509
For more information please see, TODO DeepSpeed Sparse Transformer.
For usage example please see, TODO DeepSpeed Sparse Transformer Tutorial.
"""
def __init__(
self,
# SparsityConfig parameters needs to be set accordingly
sparsity_config=SparsityConfig(num_heads=4),
key_padding_mask_mode='add',
attn_mask_mode='mul',
max_seq_length=2048):
"""Initialize the sparse self attention layer.
Arguments:
sparsity_config: optional: this parameter determins sparsity pattern configuration; it is based on SparsityConfig class.
key_padding_mask_mode: optional: a string determining if key padding mask needs to be added, `add`, or be multiplied, `mul`.
attn_mask_mode: optional: a string determining if attention mask needs to be added, `add`, or be multiplied, `mul`.
max_seq_length: optional: the maximum sequence length this sparse attention module will be applied to; it controls the size of the master_layout.
"""
super().__init__()
# sparsity information
self.sparsity_config = sparsity_config
# initialize sparse layout and register as buffer
master_layout = self.sparsity_config.make_layout(max_seq_length)
self.register_buffer("master_layout", master_layout)
self._need_layout_synchronization = True
# mask modes
self.key_padding_mask_mode = key_padding_mask_mode
self.attn_mask_mode = attn_mask_mode
ops = dict()
def get_layout(self, L):
# if layout is never synchronized across GPUs, broadcast the layout from global rank 0
if self._need_layout_synchronization and dist.is_initialized():
dist.broadcast(self.master_layout, src=0)
self._need_layout_synchronization = False
if (L % self.sparsity_config.block != 0):
raise ValueError(
                f'Sequence Length, {L}, needs to be divisible by Block size {self.sparsity_config.block}!'
)
num_blocks = L // self.sparsity_config.block
return self.master_layout[..., :num_blocks, :num_blocks].cpu() # layout needs to be a CPU tensor
# add to cache
def get_ops(self, H, L):
if L not in SparseSelfAttention.ops:
sparsity_layout = self.get_layout(L)
sparse_dot_sdd_nt = MatMul(sparsity_layout,
self.sparsity_config.block,
'sdd',
trans_a=False,
trans_b=True)
sparse_dot_dsd_nn = MatMul(sparsity_layout,
self.sparsity_config.block,
'dsd',
trans_a=False,
trans_b=False)
sparse_softmax = Softmax(sparsity_layout, self.sparsity_config.block)
SparseSelfAttention.ops[L] = (sparse_dot_sdd_nt,
sparse_dot_dsd_nn,
sparse_softmax)
return SparseSelfAttention.ops[L]
def transpose_key_for_scores(self, x, L):
bsz, num_heads, seq_len, head_dim = x.size()
if seq_len != L:
return x.permute(0, 1, 3, 2)
return x
def transpose_mask_for_sparse(self, qtype, x, is_key_padding_mask=False):
x = x.type(qtype)
if is_key_padding_mask:
xdim = x.dim()
for d in range(xdim - 1, 0, -1):
x = x.squeeze(dim=d)
return x
return x.squeeze()
# forward pass
def forward(self,
query,
key,
value,
rpe=None,
key_padding_mask=None,
attn_mask=None):
"""Applies forward phase of sparse self attention
Arguments:
query: required: query tensor
key: required: key tensor
value: required: value tensor
rpe: optional: a tensor same dimension as x that is used as relative position embedding
key_padding_mask: optional: a mask tensor of size (BatchSize X SequenceLength)
attn_mask: optional: a mask tensor of size (SequenceLength X SequenceLength); currently only 2D is supported
            key_padding_mask_mode: optional: a string determining if key_padding_mask needs to be added, `add`, or multiplied, `mul`
            attn_mask_mode: optional: a string determining if attn_mask needs to be added, `add`, or multiplied, `mul`
Return:
            attn_output: a dense tensor containing attention context
"""
bsz, num_heads, tgt_len, head_dim = query.size()
# transpose back key if it is already transposed
key = self.transpose_key_for_scores(key, tgt_len)
# check that operation is supported
if query.shape != key.shape or key.shape != value.shape:
raise NotImplementedError('only self-attention is supported for now')
# squeeze key_padding_mask if it is given
if key_padding_mask is not None:
key_padding_mask = self.transpose_mask_for_sparse(query.dtype,
key_padding_mask,
is_key_padding_mask=True)
# squeeze attn_mask if it is given
if attn_mask is not None:
attn_mask = self.transpose_mask_for_sparse(query.dtype, attn_mask)
# cache look-up table computations etc
sparse_dot_sdd_nt, sparse_dot_dsd_nn, sparse_softmax = self.get_ops(num_heads, tgt_len)
scaling = float(head_dim)**-0.5
# attention scores
attn_output_weights = sparse_dot_sdd_nt(query, key)
attn_output_weights = sparse_softmax(
attn_output_weights,
scale=scaling,
rpe=rpe,
key_padding_mask=key_padding_mask,
attn_mask=attn_mask,
key_padding_mask_mode=self.key_padding_mask_mode,
attn_mask_mode=self.attn_mask_mode)
# outputs
attn_output = sparse_dot_dsd_nn(attn_output_weights, value)
return attn_output
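    # Hedged usage sketch (the tutorial link above is still a TODO; shapes and
    # values here are assumptions based on forward()'s docstring, and a CUDA
    # device with triton is required):
    #
    # attn = SparseSelfAttention(sparsity_config=SparsityConfig(num_heads=4)).cuda()
    # q = torch.randn(1, 4, 256, 64, device='cuda')  # bsz, heads, seq_len, head_dim
    # out = attn(q, q.clone(), q.clone())            # self-attention: q, k, v same shape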
| [
"[email protected]"
]
| |
9372e89548779bcfa0783d3c99173d8509b38650 | 9b9f7546c9d4396bae7d9065b81b8c6c163b9a1d | /lectures/physics/old/NumericalIntegration001.py | 37607cf7354c619886000ad237a8df55ca0777eb | []
| no_license | geo7/csci321 | 60db9454fab00fc63624a4fc32c4dd47f02fda41 | 527744c8d76c5c4aceb07e23a1ec3127be305641 | refs/heads/master | 2020-12-28T14:50:17.267837 | 2015-06-03T19:18:53 | 2015-06-03T19:18:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,583 | py | import numpy as N
import pygame, time
from pygame.locals import *
from pygame.color import *
import numpy as N
from particlesystem import Particle, ParticleSystem
pygame.init()
screen = pygame.display.set_mode((640,480))
background = pygame.Surface(screen.get_size())
background.fill((128,128,255))
def drag(k):
def func(psystem):
for p in psystem.p:
p.f += -k*p.v
return func
def spring(k, center):
def func(psystem):
for p in psystem.p:
p.f += -k*(p.x - center)
return func
def main():
plotTime = 1
myforces = [spring(0.1, N.array((320.0, 240.0, 0.0))),
drag(0.05)]
mypositions = [N.random.random(3)*200.0 for i in range(10)]
myparticles = [Particle(1.0, x, x-x) for x in mypositions]
mysystem = ParticleSystem(myparticles)
clock = pygame.time.Clock()
running = 1
deltaT = 0.1
screen.blit(background, (0,0))
while running:
clock.tick(60)
for event in pygame.event.get():
if event.type == QUIT:
running = 0
elif event.type == KEYDOWN and event.key == K_ESCAPE:
running = 0
mysystem.EulerStep(myforces, deltaT)
if plotTime:
mysystem.Draw(screen, time=True)
else:
screen.blit(background, (0,0))
mysystem.Draw(screen)
pygame.display.flip()
if __name__ == "__main__":
try:
main()
finally:
pygame.quit()
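# Hedged sketch (assumption) of ParticleSystem.EulerStep as it is called above:
# clear the accumulated forces, let each force closure write into p.f, then
# take one explicit Euler step per particle.
#
# def EulerStep(self, forces, deltaT):
#     for p in self.p:
#         p.f = p.f * 0.0
#     for force in forces:
#         force(self)
#     for p in self.p:
#         p.x = p.x + deltaT * p.v
#         p.v = p.v + deltaT * p.f / p.m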
| [
"[email protected]"
]
| |
c1ec37951f61167493d80dc54208e1b802e5e123 | 45c685884bdb42fb4bf1c2b9e51a9dd732ecc9bb | /component/my_k8s.py | 9bc2db91ea198c836b07a774b0fa7ffc859e656b | []
| no_license | tristan-tsl/devops-demo | dffdb8dac2bf2be7e02bb44e889a16dbdeba5a6b | 369fc1b8458741d7642e280da9a3e283010535b0 | refs/heads/master | 2022-12-26T09:43:07.984118 | 2020-10-02T14:24:15 | 2020-10-02T14:24:15 | 217,187,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,305 | py | # def update_image(url, namespace, service_name, image_id, base_path, username, password):
# return "更新镜像成功"
"""
Reference: https://k8smeetup.github.io/docs/tasks/administer-cluster/access-cluster-api/
# Get the apiserver info (address and token)
kubectl config view
"""
from kubernetes import client
# aToken = "QmFzaWMgTlVaRE5qY3hRVUkzTWtJeE16bEdNelZHTkVJNlpqUlRXbTFXY2paclpWTjZPVGxvUWxCMVRHcEtiVlpFTVV4cFVrMHlUVkJTYlRsTmRVWTBUUT09"
# aConfiguration = client.Configuration()
# aConfiguration.host = "https://192.168.71.223:8765/r/projects/1a5/kubernetes:6443"
# aConfiguration.verify_ssl = False
# aConfiguration.api_key = {"authorization": "Bearer " + aToken}
# aApiClient = client.ApiClient(aConfiguration)
#
# update the pod's image id
# deployment_name = "servicemail"
# namespace = "default"
# image_id = ""
#
# apps_v1beta1 = client.AppsV1beta1Api(aApiClient)
# deployment_data = apps_v1beta1.read_namespaced_deployment(namespace=namespace, name=deployment_name)
# print(deployment_data)
# deployment_data.spec.template.spec.containers[
# 0].image = image_id
# api_response = apps_v1beta1.patch_namespaced_deployment(
# name=deployment_name,
# namespace=namespace,
# body=deployment_data)
# print(api_response)
class MyK8s(object):
def __init__(self, host, token):
a_configuration = client.Configuration()
a_configuration.host = host
a_configuration.verify_ssl = False
a_configuration.api_key = {"authorization": "Bearer " + token}
a_api_client = client.ApiClient(a_configuration)
apps_v1beta1 = client.AppsV1beta1Api(a_api_client)
self.apps_v1beta1 = apps_v1beta1
def update_image(self, namespace, name, image_id):
deployment_data = self.apps_v1beta1.read_namespaced_deployment(namespace=namespace, name=name)
deployment_data.spec.template.spec.containers[0].image = image_id
self.apps_v1beta1.patch_namespaced_deployment(
name=name,
namespace=namespace,
body=deployment_data)
return "更新镜像成功"
def get_cur_image_id(self, namespace, name):
deployment_data = self.apps_v1beta1.read_namespaced_deployment(namespace=namespace, name=name)
return deployment_data.spec.template.spec.containers[0].image
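# Hedged usage sketch (host, token and workload names are placeholders,
# following the commented Rancher example above):
#
# k8s = MyK8s("https://<rancher-host>/r/projects/<id>/kubernetes:6443", "<token>")
# print(k8s.get_cur_image_id("default", "servicemail"))
# k8s.update_image("default", "servicemail", "registry.example.com/servicemail:1.2.3")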
| [
"[email protected]"
]
| |
1f225e11537f86d1dd4e294e32c67452d7e14b3b | 2af6a5c2d33e2046a1d25ae9dd66d349d3833940 | /res_bw/scripts/common/lib/encodings/johab.py | 26ff76bdd673424c1926f41d0305e01c233650fb | []
| no_license | webiumsk/WOT-0.9.12-CT | e6c8b5bb106fad71b5c3056ada59fb1aebc5f2b2 | 2506e34bd6634ad500b6501f4ed4f04af3f43fa0 | refs/heads/master | 2021-01-10T01:38:38.080814 | 2015-11-11T00:08:04 | 2015-11-11T00:08:04 | 45,803,240 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,147 | py | # 2015.11.10 21:35:52 Střední Evropa (běžný čas)
# Embedded file name: scripts/common/Lib/encodings/johab.py
import _codecs_kr, codecs
import _multibytecodec as mbc
codec = _codecs_kr.getcodec('johab')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(name='johab', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter)
# okay decompyling c:\Users\PC\wotsources\files\originals\res_bw\scripts\common\lib\encodings\johab.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2015.11.10 21:35:52 Střední Evropa (běžný čas)
| [
"[email protected]"
]
| |
d1b20f9a7480772ab77e15a32114eb4f078ac4c3 | 42b324291b51b14e284a8c5e14270a4e9446737a | /test50.py | 044d8d1e1b056372dbbabfbd11ebdd9fe49e77e1 | []
| no_license | christinecoco/python_test | 3f7505c85711eb6bff27cbc68bfd3fd9829a843d | 6d6c519e237f1d9e7243e3e6378a0ca44af98439 | refs/heads/master | 2020-05-23T22:26:58.341688 | 2019-05-16T07:23:28 | 2019-05-16T07:23:28 | 186,973,896 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 184 | py | # Print random numbers
import random
print(random.random())  # random float in [0, 1)
print(random.uniform(10, 100))  # random float between 10 and 100
print(random.randint(12, 88))  # random integer between 12 and 88 | [
"[email protected]"
]
| |
5088263a12593b85007e826590846f8e1201ab69 | f3e0cc142e77445b286234fc751ecae219b75458 | /pyBLE-NRF52840/2.传感器实验/9.MPU6050/lib/adafruit_mpu6050.py | c8fdfa89b7aa49ac5d364822bd1d8bbd85d4ec26 | [
"MIT"
]
| permissive | zhangweiIOT/MicroPython_Examples | 9389528b9dd6c8cd8c4f8932b95252a23de684ae | f06a1bee398674ceafebed2aac88d8413cc8abad | refs/heads/master | 2023-08-25T13:45:36.924345 | 2021-10-26T06:49:41 | 2021-10-26T06:49:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,128 | py | # The MIT License (MIT)
#
# Copyright (c) 2019 Bryan Siepert for Adafruit Industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
`adafruit_mpu6050`
================================================================================
CircuitPython helper library for the MPU6050 6-DoF Accelerometer and Gyroscope
* Author(s): Bryan Siepert
Implementation Notes
--------------------
**Hardware:**
* Adafruit's MPU6050 Breakout: https://adafruit.com/products/3886
**Software and Dependencies:**
* Adafruit CircuitPython firmware for the supported boards:
https://github.com/adafruit/circuitpython/releases
* Adafruit's Bus Device library: https://github.com/adafruit/Adafruit_CircuitPython_BusDevice
* Adafruit's Register library: https://github.com/adafruit/Adafruit_CircuitPython_Register
"""
# imports
__version__ = "1.1.1"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_MPU6050.git"
from time import sleep
from adafruit_register.i2c_struct import UnaryStruct, ROUnaryStruct
from adafruit_register.i2c_struct_array import StructArray
from adafruit_register.i2c_bit import RWBit
from adafruit_register.i2c_bits import RWBits
import adafruit_bus_device.i2c_device as i2c_device
# pylint: disable=bad-whitespace
_MPU6050_DEFAULT_ADDRESS = 0x68 # MPU6050 default i2c address w/ AD0 low
_MPU6050_DEVICE_ID = 0x68 # The correct MPU6050_WHO_AM_I value
_MPU6050_SELF_TEST_X = 0x0D # Self test factory calibrated values register
_MPU6050_SELF_TEST_Y = 0x0E # Self test factory calibrated values register
_MPU6050_SELF_TEST_Z = 0x0F # Self test factory calibrated values register
_MPU6050_SELF_TEST_A = 0x10 # Self test factory calibrated values register
_MPU6050_SMPLRT_DIV = 0x19 # sample rate divisor register
_MPU6050_CONFIG = 0x1A # General configuration register
_MPU6050_GYRO_CONFIG = 0x1B # Gyro specfic configuration register
_MPU6050_ACCEL_CONFIG = 0x1C # Accelerometer specific configration register
_MPU6050_INT_PIN_CONFIG = 0x37 # Interrupt pin configuration register
_MPU6050_ACCEL_OUT = 0x3B # base address for sensor data reads
_MPU6050_TEMP_OUT = 0x41 # Temperature data high byte register
_MPU6050_GYRO_OUT = 0x43 # base address for sensor data reads
_MPU6050_SIG_PATH_RESET = 0x68 # register to reset sensor signal paths
_MPU6050_USER_CTRL = 0x6A # FIFO and I2C Master control register
_MPU6050_PWR_MGMT_1 = 0x6B # Primary power/sleep control register
_MPU6050_PWR_MGMT_2 = 0x6C # Secondary power/sleep control register
_MPU6050_WHO_AM_I = 0x75 # Device ID register
STANDARD_GRAVITY = 9.80665
# pylint: enable=bad-whitespace
class Range: # pylint: disable=too-few-public-methods
"""Allowed values for `accelerometer_range`.
- ``Range.RANGE_2_G``
- ``Range.RANGE_4_G``
- ``Range.RANGE_8_G``
- ``Range.RANGE_16_G``
"""
RANGE_2_G = 0 # +/- 2g (default value)
RANGE_4_G = 1 # +/- 4g
RANGE_8_G = 2 # +/- 8g
RANGE_16_G = 3 # +/- 16g
class GyroRange: # pylint: disable=too-few-public-methods
"""Allowed values for `gyro_range`.
- ``GyroRange.RANGE_250_DPS``
- ``GyroRange.RANGE_500_DPS``
- ``GyroRange.RANGE_1000_DPS``
- ``GyroRange.RANGE_2000_DPS``
"""
RANGE_250_DPS = 0 # +/- 250 deg/s (default value)
RANGE_500_DPS = 1 # +/- 500 deg/s
RANGE_1000_DPS = 2 # +/- 1000 deg/s
RANGE_2000_DPS = 3 # +/- 2000 deg/s
class Bandwidth: # pylint: disable=too-few-public-methods
"""Allowed values for `filter_bandwidth`.
- ``Bandwidth.BAND_260_HZ``
- ``Bandwidth.BAND_184_HZ``
- ``Bandwidth.BAND_94_HZ``
- ``Bandwidth.BAND_44_HZ``
- ``Bandwidth.BAND_21_HZ``
- ``Bandwidth.BAND_10_HZ``
- ``Bandwidth.BAND_5_HZ``
"""
BAND_260_HZ = 0 # Docs imply this disables the filter
BAND_184_HZ = 1 # 184 Hz
BAND_94_HZ = 2 # 94 Hz
BAND_44_HZ = 3 # 44 Hz
BAND_21_HZ = 4 # 21 Hz
BAND_10_HZ = 5 # 10 Hz
BAND_5_HZ = 6 # 5 Hz
class Rate: # pylint: disable=too-few-public-methods
"""Allowed values for `cycle_rate`.
- ``Rate.CYCLE_1_25_HZ``
- ``Rate.CYCLE_5_HZ``
- ``Rate.CYCLE_20_HZ``
- ``Rate.CYCLE_40_HZ``
"""
CYCLE_1_25_HZ = 0 # 1.25 Hz
CYCLE_5_HZ = 1 # 5 Hz
CYCLE_20_HZ = 2 # 20 Hz
CYCLE_40_HZ = 3 # 40 Hz
class MPU6050:
"""Driver for the MPU6050 6-DoF accelerometer and gyroscope.
:param ~busio.I2C i2c_bus: The I2C bus the MPU6050 is connected to.
:param address: The I2C slave address of the sensor
"""
def __init__(self, i2c_bus, address=_MPU6050_DEFAULT_ADDRESS):
self.i2c_device = i2c_device.I2CDevice(i2c_bus, address)
if self._device_id != _MPU6050_DEVICE_ID:
raise RuntimeError("Failed to find MPU6050 - check your wiring!")
self.reset()
self._sample_rate_divisor = 0
self._filter_bandwidth = Bandwidth.BAND_260_HZ
self._gyro_range = GyroRange.RANGE_500_DPS
self._accel_range = Range.RANGE_2_G
sleep(0.100)
self._clock_source = 1 # set to use gyro x-axis as reference
sleep(0.100)
self.sleep = False
sleep(0.010)
def reset(self):
"""Reinitialize the sensor"""
self._reset = True
while self._reset is True:
sleep(0.001)
sleep(0.100)
_signal_path_reset = 0b111 # reset all sensors
sleep(0.100)
_clock_source = RWBits(3, _MPU6050_PWR_MGMT_1, 0)
_device_id = ROUnaryStruct(_MPU6050_WHO_AM_I, ">B")
_reset = RWBit(_MPU6050_PWR_MGMT_1, 7, 1)
_signal_path_reset = RWBits(3, _MPU6050_SIG_PATH_RESET, 3)
_gyro_range = RWBits(2, _MPU6050_GYRO_CONFIG, 3)
_accel_range = RWBits(2, _MPU6050_ACCEL_CONFIG, 3)
_filter_bandwidth = RWBits(2, _MPU6050_CONFIG, 3)
_raw_accel_data = StructArray(_MPU6050_ACCEL_OUT, ">h", 3)
_raw_gyro_data = StructArray(_MPU6050_GYRO_OUT, ">h", 3)
_raw_temp_data = ROUnaryStruct(_MPU6050_TEMP_OUT, ">h")
_cycle = RWBit(_MPU6050_PWR_MGMT_1, 5)
_cycle_rate = RWBits(2, _MPU6050_PWR_MGMT_2, 6, 1)
sleep = RWBit(_MPU6050_PWR_MGMT_1, 6, 1)
"""Shuts down the accelerometers and gyroscopes, saving power. No new data will
be recorded until the sensor is taken out of sleep by setting to `False`"""
sample_rate_divisor = UnaryStruct(_MPU6050_SMPLRT_DIV, ">B")
"""The sample rate divisor. See the datasheet for additional detail"""
@property
def temperature(self):
"""The current temperature in º C"""
raw_temperature = self._raw_temp_data
temp = (raw_temperature / 340.0) + 36.53
return temp
@property
def acceleration(self):
"""Acceleration X, Y, and Z axis data in m/s^2"""
raw_data = self._raw_accel_data
raw_x = raw_data[0][0]
raw_y = raw_data[1][0]
raw_z = raw_data[2][0]
accel_range = self._accel_range
accel_scale = 1
if accel_range == Range.RANGE_16_G:
accel_scale = 2048
if accel_range == Range.RANGE_8_G:
accel_scale = 4096
if accel_range == Range.RANGE_4_G:
accel_scale = 8192
if accel_range == Range.RANGE_2_G:
accel_scale = 16384
        # set up range-dependent scaling
accel_x = (raw_x / accel_scale) * STANDARD_GRAVITY
accel_y = (raw_y / accel_scale) * STANDARD_GRAVITY
accel_z = (raw_z / accel_scale) * STANDARD_GRAVITY
return (accel_x, accel_y, accel_z)
@property
def gyro(self):
"""Gyroscope X, Y, and Z axis data in º/s"""
raw_data = self._raw_gyro_data
raw_x = raw_data[0][0]
raw_y = raw_data[1][0]
raw_z = raw_data[2][0]
gyro_scale = 1
gyro_range = self._gyro_range
if gyro_range == GyroRange.RANGE_250_DPS:
gyro_scale = 131
if gyro_range == GyroRange.RANGE_500_DPS:
gyro_scale = 65.5
if gyro_range == GyroRange.RANGE_1000_DPS:
gyro_scale = 32.8
if gyro_range == GyroRange.RANGE_2000_DPS:
gyro_scale = 16.4
        # set up range-dependent scaling
gyro_x = raw_x / gyro_scale
gyro_y = raw_y / gyro_scale
gyro_z = raw_z / gyro_scale
return (gyro_x, gyro_y, gyro_z)
@property
def cycle(self):
"""Enable or disable perodic measurement at a rate set by `cycle_rate`.
If the sensor was in sleep mode, it will be waken up to cycle"""
return self._cycle
@cycle.setter
def cycle(self, value):
self.sleep = not value
self._cycle = value
@property
def gyro_range(self):
"""The measurement range of all gyroscope axes. Must be a `GyroRange`"""
return self._gyro_range
@gyro_range.setter
def gyro_range(self, value):
if (value < 0) or (value > 3):
raise ValueError("gyro_range must be a GyroRange")
self._gyro_range = value
sleep(0.01)
@property
def accelerometer_range(self):
"""The measurement range of all accelerometer axes. Must be a `Range`"""
return self._accel_range
@accelerometer_range.setter
def accelerometer_range(self, value):
if (value < 0) or (value > 3):
raise ValueError("accelerometer_range must be a Range")
self._accel_range = value
sleep(0.01)
@property
def filter_bandwidth(self):
"""The bandwidth of the gyroscope Digital Low Pass Filter. Must be a `GyroRange`"""
return self._filter_bandwidth
@filter_bandwidth.setter
def filter_bandwidth(self, value):
if (value < 0) or (value > 6):
raise ValueError("filter_bandwidth must be a Bandwidth")
self._filter_bandwidth = value
sleep(0.01)
@property
def cycle_rate(self):
"""The rate that measurements are taken while in `cycle` mode. Must be a `Rate`"""
return self._cycle_rate
@cycle_rate.setter
def cycle_rate(self, value):
if (value < 0) or (value > 3):
raise ValueError("cycle_rate must be a Rate")
self._cycle_rate = value
sleep(0.01)
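    # Hedged usage sketch (assumption: standard CircuitPython wiring; the pin
    # names come from the board module, not from this driver):
    #
    # import board
    # import busio
    # i2c = busio.I2C(board.SCL, board.SDA)
    # mpu = MPU6050(i2c)
    # print("Acceleration (m/s^2):", mpu.acceleration)
    # print("Gyro (deg/s):", mpu.gyro)
    # print("Temperature (C):", mpu.temperature)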
| [
"[email protected]"
]
| |
c96add8b2d99816e9b7bbd12a6f3472ee27bde96 | a8fe24554013e7f759808203899d85f5bc4362b2 | /libcms/apps/zgate/views.py | 67933648ecbb2fc9010c16bdbaf1217415d04348 | []
| no_license | isergey/school_tatar | 035279d392620ce2c7ded3987d9e027eae7d69d5 | 9161b4758469ed90750be60830b4267756710bcf | refs/heads/master | 2023-06-25T23:02:53.290529 | 2023-06-09T16:17:05 | 2023-06-09T16:17:05 | 160,867,420 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,535 | py | # -*- encoding: utf-8 -*-
import json
from lxml import etree
from lxml import etree as ET
#import xml.etree.cElementTree as ET
import time
import pymorphy
from django.views.decorators.csrf import csrf_exempt
from django.utils.http import urlquote
from django.http import HttpResponse
from django.shortcuts import redirect, get_object_or_404, render, Http404
from django.core.urlresolvers import reverse
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from guardian.core import ObjectPermissionChecker
from participants.models import Library
#catalogs = settings.ZGATE['catalogs']
from models import ZCatalog, SavedRequest, SavedDocument
import zworker
from common import humanquery
from libcms.libs.common.xslt_transformers import xslt_bib_draw_transformer, xslt_transformer, short_transform
def json_error(error):
return json.dumps({
'status': 'error',
'error': error
},
ensure_ascii=False)
def set_cookies_to_response(cookies, response):
for key in cookies:
response.set_cookie(key, cookies[key])
return response
def render_search_result(request, catalog, zresult=''):
cookies = {}
if zresult == '':
url = catalog.url
new_get = []
for key in request.GET:
if key == 'zstate': continue
new_get.append(urlquote(key) + '=' + urlquote(request.GET[key]))
new_get = '&'.join(new_get)
if request.GET['zstate'] == 'action':
url = url + '?' + new_get
else:
url = url + '?' + request.GET['zstate'].replace(' ', '+')
(zresult, cookies) = zworker.request(url, cookies=request.COOKIES)
try:
zresults_body_element = zworker.get_body_element(zresult)
zresults_body_element = zworker.change_form_action(zresults_body_element)
zresults_body_element = zworker.change_links_href(zresults_body_element)
except Exception:
return HttpResponse(u'Некорректный url')
result = zworker.make_html_body_content(zresults_body_element)
response = render(request, 'zgate/search_results.html', {
'catalog_title': catalog.title,
'search_results': result
})
return set_cookies_to_response(cookies, response)
def render_form(request, zresult, catalog):
zworker.entry_point = reverse("zgate_index", args=[catalog.id])
page_body = zworker.get_body_element(zresult)
page_body = zworker.change_links_href(page_body)
page_body = zworker.change_form_action(page_body)
page_body = zworker.make_html_body_content(page_body)
return render(request, 'zgate/search_form.html',
{'catalog_title': catalog.title,
'search_form': page_body,
'catalog': catalog})
def help(request, catalog_id='', slug=''):
if catalog_id:
catalog = get_object_or_404(ZCatalog, id=catalog_id)
if slug:
catalog = get_object_or_404(ZCatalog, latin_title=slug)
return render(request, 'zgate/help.html', {
'catalog': catalog
})
def render_detail(request, catalog):
zvars = request.GET['zstate'].split(' ')
zstate = request.GET['zstate'].replace(' ', '+')
zgate_url = catalog.url
(zresult, cookies) = zworker.request(zgate_url + '?' + zstate, cookies=request.COOKIES)
zresults_body_element = zworker.get_body_element(zresult)
zresults_body_element = zworker.change_links_href(zresults_body_element)
    # fetch the XML representation of the record
(xml_record, cookies) = zworker.request(zgate_url + '?' + zstate.replace('1+F', '1+X'), cookies=request.COOKIES)
owners = []
record_id = '0'
st = request.GET['zstate']
zsession = zvars[1]
zoffset = zvars[3]
save_document = False
doc = None
try:
xml_record = ET.XML(xml_record)
record_tree = xml_record.xpath('/record/bibliographicRecord/*')
if record_tree:
doc = xslt_transformer(record_tree[0])
doc = doc_tree_to_dict(doc)
# owners = get_document_owners(xml_record)
# record_id = get_record_id(xml_record)
save_document = True
except SyntaxError as e:
        pass  # skip adding the holders
result = zworker.make_html_body_content(zresults_body_element)
response = render(request, 'zgate/search_results.html', {
'doc': doc,
'catalog_title': catalog.title,
'search_results': result,
# 'owners': owners,
'record_id': record_id,
'zsession': zsession,
'zoffset': zoffset,
'catalog': catalog,
'save_document': save_document,
})
return set_cookies_to_response(cookies, response)
@login_required
def save_requests(request, catalog):
query = ''
human_query = ''
zurls = ''
if 'TERM' in request.GET and request.GET['TERM']:
query = request.GET['TERM']
try:
human_query = humanquery.HumanQuery(query).convert()
except Exception as e:
if settings.DEBUG:
raise e
else:
return HttpResponse(u'Неверные параметры запроса. Не указаны поисковые параметры.')
if 'DB' in request.GET and request.GET['DB']:
zurls = request.GET['DB']
else:
return HttpResponse(u'Неверные параметры запроса, Не указаны параметры баз данных.')
saved_request = SavedRequest(zcatalog=catalog, user=request.user, zurls=zurls, query=query, human_query=human_query)
saved_request.save()
return render(request, 'zgate/save_request.html', {
'saved_request': saved_request,
'module':'zgate'
})
def save_document(request):
if request.method != 'POST':
return HttpResponse('Only post requests')
expiry_date = None
if request.user.is_authenticated():
owner_id = request.user.username
elif request.session.session_key:
owner_id = request.session.session_key
expiry_date = request.session.get_expiry_date()
else:
return HttpResponse(json_error(u'Документ не может быть сохранен, возможно в Вашем браузере отключены cookies.'))
catalog = get_object_or_404(ZCatalog, latin_title=request.POST['catalog_id'])
zgate_url = catalog.url
zstate = 'present+' + request.POST['zsession'] +\
'+default+' + request.POST['zoffset'] +\
'+1+X+1.2.840.10003.5.28+'+catalog.default_lang
(xml_record, cookies) = zworker.request(zgate_url + '?' + zstate)
try:
tree = ET.XML(xml_record)
except SyntaxError as e:
return HttpResponse(json_error(u'Заказ не выполнен. Возможно, время сессии истекло'))
comments = None
if 'comments' in request.POST and request.POST['comments']:
comments = request.POST['comments']
try:
doc = etree.XML(xml_record)
result_tree = xslt_bib_draw_transformer(doc)
full_document = unicode(result_tree)
result_tree = short_transform(doc)
short_document = unicode(result_tree)
except Exception, e:
raise e
saved_document = SavedDocument(
zcatalog=catalog,
owner_id=owner_id,
document=xml_record,
comments=comments,
expiry_date=expiry_date,
full_document=full_document,
short_document=short_document
)
saved_document.save()
response = HttpResponse(json.dumps({'status': 'ok'}, ensure_ascii=False))
return response
import uuid
from models import SearchRequestLog
# morph = pymorphy.get_morph(settings.PROJECT_PATH + 'data/pymorphy/ru/cdb', 'cdb')
def log_search_request(request, catalog):
def clean_term(term):
"""
Возвращает кортеж из ненормализованног и нормализованного терма
"""
terms = term.strip().lower().split()
nn_term = u' '.join(terms)
# n_terms = []
        # #normalization
# for t in terms:
# n_term = morph.normalize(t.upper())
# if isinstance(n_term, set):
# n_terms.append(n_term.pop().lower())
# elif isinstance(n_term, unicode):
# n_terms.append(n_term.lower())
n_term = u' '.join(nn_term)
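        # NOTE: with the pymorphy normalization above commented out, this join
        # only interleaves the characters of nn_term with spaces and its result
        # is never returned; the "normalized" slot below is the raw token list.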
return (nn_term, terms)
search_request_id = uuid.uuid4().hex
term_groups = []
term = request.POST.get('TERM_1', None)
if term:
forms = clean_term(term)
term_groups.append({
'nn': forms[0],
'n': forms[1],
'use': request.POST.get('USE_1',u'not defined'),
})
term = request.POST.get('TERM_2', None)
if term:
forms = clean_term(term)
term_groups.append({
'nn': forms[0],
'n': forms[1],
'use': request.POST.get('USE_2',u'not defined'),
})
term = request.POST.get('TERM_3', None)
if term:
forms = clean_term(term)
term_groups.append({
'nn': forms[0],
'n': forms[1],
'use': request.POST.get('USE_3',u'not defined'),
})
for group in term_groups:
SearchRequestLog(
catalog=catalog,
search_id=search_request_id,
use=group['use'],
normalize=group['n'],
not_normalize=group['nn'],
).save()
@csrf_exempt
def draw_order(request, catalog_id='', slug=''):
catalog = None
if catalog_id:
catalog = get_object_or_404(ZCatalog, id=catalog_id)
elif slug:
catalog = get_object_or_404(ZCatalog, latin_title=slug)
else:
raise Http404()
id = request.GET.get('id', None)
if not id:
raise Http404()
(zgate_form, cookies) = zworker.get_zgate_form(
zgate_url=catalog.url,
xml=catalog.xml,
xsl=catalog.xsl,
cookies=request.COOKIES,
username='5881-12',
password='AAsa5YFs',
)
session_id = zworker.get_zgate_session_id(zgate_form)
form_params = zworker.get_form_dict(zgate_form)
del(form_params['scan'])
form_params['use_1']='12:1.2.840.10003.3.1'
form_params['term_1']= id
result = zworker.request(catalog.url, data=form_params, cookies=cookies)
if result[0].decode('utf-8').find(u'id="%s' % (id,)) >= 0:
link = catalog.url + '?preorder+%s+1+default+1+1.2.840.10003.5.28+rus' % session_id
return redirect(link)
return HttpResponse(u'Ok')
@csrf_exempt
def index(request, catalog_id='', slug=''):
catalog = None
if catalog_id:
catalog = get_object_or_404(ZCatalog, id=catalog_id)
elif slug:
catalog = get_object_or_404(ZCatalog, latin_title=slug)
else:
raise Http404()
checker = ObjectPermissionChecker(request.user)
if not checker.has_perm('view_zcatalog', catalog):
        return HttpResponse(u'Access denied')
if not catalog.can_search:
return HttpResponse(u"Каталог не доступен для поиска.")
zgate_url = catalog.url
if request.method == 'POST' and 'SESSION_ID' in request.POST:
log_search_request(request, catalog)
(result, cookies) = zworker.request(zgate_url, data=request.POST, cookies=request.COOKIES)
response = render_search_result(request, catalog, zresult=result, )
return set_cookies_to_response(cookies,response)
else:
        if 'zstate' in request.GET: # the user has already started clicking result links
if 'ACTION' in request.GET and request.GET['ACTION'] == 'pq':
return save_requests(request, catalog)
url = zgate_url + '?' + request.GET['zstate'].replace(' ', '+')
vars = request.GET['zstate'].split(' ')
cookies = {}
if vars[0] == 'form':
try:
(zresult, cookies) = zworker.request(url, cookies=request.COOKIES)
except Exception:
                    return HttpResponse(u'Received an invalid response. Please try the search again.')
response = render_form(request, zresult=zresult, catalog=catalog)
return set_cookies_to_response(cookies, response)
elif vars[0] == 'present':
if vars[4] == '1' and vars[5] == 'F':
try:
response = render_detail(request, catalog)
except Exception:
                        return HttpResponse(u'The server cannot display the result correctly. Please repeat the request.')
return set_cookies_to_response(cookies,response)
response = render_search_result(request, catalog)
return set_cookies_to_response(cookies,response)
else:
response = render_search_result(request, catalog)
return set_cookies_to_response(cookies, response)
        else: # otherwise this is just the form initialization
# if not catalog.can_search:
# return Htt
(zgate_form, cookies) = zworker.get_zgate_form(
zgate_url=zgate_url,
xml=catalog.xml,
xsl=catalog.xsl,
cookies=request.COOKIES
)
response = render_form(request, zgate_form, catalog)
return set_cookies_to_response(cookies, response)
def saved_document_list(request):
owner_id = ''
if request.user.is_authenticated():
owner_id = request.user.username
elif request.session.session_key:
owner_id = request.session.session_key
saved_documents = SavedDocument.objects.filter(owner_id=owner_id).order_by('-add_date')
format = 'full'
if 'format' in request.GET and request.GET['format'] == 'short':
format = 'short'
return render(request, 'zgate/saved_documents_list.html',
{'saved_documents': saved_documents,
'format': format,
'module':'zgate'})
def load_documents(request):
response = HttpResponse(content_type='application/txt')
response['Content-Disposition'] = 'attachment; filename=documents.txt'
if request.method == 'POST':
owner_id = ''
if request.user.is_authenticated():
owner_id = request.user.username
elif request.session.session_key:
owner_id = request.session.session_key
documents = []
if 'download' in request.POST and isinstance(request.POST.getlist('download'), list) and len(request.POST.getlist('download')):
save_requests = SavedDocument.objects.filter(pk__in=request.POST.getlist('download'), owner_id=owner_id)
for save_request in save_requests:
documents.append(save_request.short_document)
response.write('\r\n'.join(documents))
else:
save_requests = SavedDocument.objects.filter(owner_id=owner_id)
for save_request in save_requests:
documents.append(save_request.short_document)
response.write('\r\n'.join(documents))
return response
def delete_saved_document(request, document_id=''):
owner_id = ''
if request.user.is_authenticated():
owner_id = request.user.username
elif request.session.session_key:
owner_id = request.session.session_key
saved_document = get_object_or_404(SavedDocument,id=document_id, owner_id=owner_id)
saved_document.delete()
return redirect(reverse('zgate_saved_document_list'))
@login_required
def saved_requests_list(request):
saved_requests = SavedRequest.objects.filter(user=request.user).order_by('-add_date').select_related()
paginator = Paginator(saved_requests, 20)
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
saved_requests_list = paginator.page(page)
except (EmptyPage, InvalidPage):
saved_requests_list = paginator.page(paginator.num_pages)
return render(request, 'zgate/saved_requests_list.html', {
'saved_requests_list': saved_requests_list,
'module':'zgate'
})
@login_required
def make_saved_request(request, request_id=''):
saved_request = get_object_or_404(SavedRequest,id=request_id, user = request.user)
(zgate_form, cookies) = zworker.get_zgate_form(
zgate_url=saved_request.zcatalog.url,
xml=saved_request.zcatalog.xml,
xsl=saved_request.zcatalog.xsl,
cookies=request.COOKIES
# username=username,
# password=password
)
session_id = zworker.get_zgate_session_id(zgate_form)
get_params = []
get_params.append(urlquote('zstate') + '=' + urlquote('action'))
get_params.append(urlquote('ACTION') + '=' + urlquote('SEARCH'))
get_params.append(urlquote('SESSION_ID') + '=' + urlquote(session_id))
get_params.append(urlquote('LANG') + '=' + urlquote(saved_request.zcatalog.default_lang))
get_params.append(urlquote('DBNAME') + '=' + urlquote(saved_request.zurls))
get_params.append(urlquote('TERM_1') + '=' + urlquote(saved_request.query))
get_params.append(urlquote('ESNAME') + '=' + urlquote('B'))
get_params.append(urlquote('MAXRECORDS') + '=' + urlquote('20'))
get_params.append(urlquote('CHAR_SET') + '=' + urlquote('UTF-8'))
get_params.append(urlquote('RECSYNTAX') + '=' + urlquote('1.2.840.10003.5.28'))
link = reverse('zgate:zgate_index', args=(saved_request.zcatalog.id,)) + '?' + '&'.join(get_params)
response = redirect(link)
return set_cookies_to_response(cookies, response)
@login_required
def delete_saved_request(request, request_id=''):
saved_request = get_object_or_404(SavedRequest,id=request_id, user = request.user)
saved_request.delete()
return redirect(reverse('zgate_saved_requests'))
"""
xml_record ETreeElement
return list of owners
"""
def get_document_owners(xml_record):
owner_trees = xml_record.xpath('/record/bibliographicRecord/record/field[@id="999"]/subfield[@id="a"]')
owners = []
for owner_tree in owner_trees:
owners.append(owner_tree.text)
# print etree.tostring(owners[0], encoding='utf-8')
# def get_subfields(field_code, subfield_code):
# subfields = []
# fields = xml_record.findall('field')
# for field in fields:
# if field.attrib['id'] == field_code:
# subfileds = field.findall('subfield')
# for subfiled in subfileds:
# if subfiled.attrib['id'] == subfield_code:
# if subfiled.text:
# subfields.append(subfiled.text) # сиглы организаций (code)
# break
# return subfields
#
    # #first we look for holders in the 850 field
# owners = get_subfields('999', 'a')
# if not owners:
# owners = get_subfields('899', 'a')
    #if not found there, then in field 899
owners_dicts = []
if owners:
libraries = Library.objects.filter(code__in=owners)
for org in libraries:
owners_dicts.append({
'code':org.code,
'name': org.name
})
return owners_dicts
"""
xml_record ETreeElement
return record id string or None if record not have id
"""
def get_record_id(xml_record):
fields = xml_record.findall('field')
for field in fields:
if field.attrib['id'] == '001':
if field.text:
return field.text
return None
def doc_tree_to_dict(doc_tree):
doc_dict = {}
for element in doc_tree.getroot().getchildren():
attrib = element.attrib['name']
value = element.text
        #if the field is empty, skip it
if not value: continue
# value = beautify(value)
values = doc_dict.get(attrib, None)
if not values:
doc_dict[attrib] = [value]
else:
values.append(value)
return doc_dict | [
"[email protected]"
]
| |
eddf7b80ce3a1109d590bdcb0be8dfc50d353886 | f51c6d0cebb27c377ce9830deec4b727b9b2ee90 | /Databases/Cars/search_cars.py | 082a2af8cdcf0968fe3a65a7f31f1c02452fc7d8 | []
| no_license | dbbudd/Python-Experiments | 1c3c1322583aaaf2016a2f2f3061e6d034c5d1c8 | b6d294bf11a5c92b8578d16aa2f63cc27fc47b07 | refs/heads/master | 2020-04-17T02:21:36.693593 | 2019-01-17T00:18:34 | 2019-01-17T00:18:34 | 166,130,283 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,210 | py | #!/usr/bin/python
print "Content-type: text/html"
print
#import the libraries
import cgi
import cgitb; cgitb.enable()
import sqlite3
import sys
def generate_search_form():
#create a database connection
db = sqlite3.connect("cars.db")
db.row_factory = sqlite3.Row
def select_table_names():
my_data = db.execute("SELECT name FROM sqlite_master WHERE type='table'")
print("<p>Which table do you wish to search in? </p>")
print("<SELECT name='query_table'>")
rows = my_data.fetchall()
for row in rows:
my_value = "value='" + str(row[0]) + "'"
print("<option " + my_value + ">")
print(row[0])
print("</option>")
print("</SELECT>")
def select_column_names():
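        # builds one <option> per column, labelled "table: column",
        # by probing a single sample row from each table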
print("<p>Which column do you wish to search in? </p>")
print("<SELECT name='query_column'>")
cursor = db.execute("SELECT name FROM sqlite_master WHERE type='table'")
for tablerow in cursor.fetchall():
table = tablerow[0]
cursor.execute("SELECT * FROM {t} LIMIT 1".format(t = table))
for row in cursor:
for field in row.keys():
my_value = "value='" + str(field) + "'"
print("<option " + my_value + ">")
print(table)
print(": ")
print(str(field))
print("</option>")
print("</SELECT>")
#CREATING THE FORM STRUCTURE
print("""
<h1>QUERY SEARCH</h1>
	<p>For a wildcard search use '%'. For example 'F%' will return everything that starts with an 'F' and '%' on its own will return everything.</p>
<form id='add_car' action='search_car_data.py' method='POST'>
<p>Search: <input name='query_search' type='text'/></p>
""")
select_table_names()
select_column_names()
print("""
<p><input type='submit' value='search' /></p>
</form>
""")
print("""
<html>
<head>
<title>THE CARS DATABASE</title>
</head>
<body>
""")
generate_search_form()
print("""
</body>
</html>
""")
| [
"[email protected]"
]
| |
3d5c2d77fae942b3e0fd2c38fd0924121f3af901 | fcde32709c62b8ee86da459bb7c8eee52c848118 | /code/shopping重构/shopping__oo.py | db5da2c77b491014df24d346b47ad7b0669ca33e | []
| no_license | klaus2015/py_base | 6b92d362c3d7dc0e09205a037f4d580381dac94d | ec32c731c1c2f6a0dab87f1d167397e4fa86b8de | refs/heads/master | 2022-07-28T15:49:30.383648 | 2020-05-11T15:31:43 | 2020-05-11T15:31:43 | 261,777,278 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,543 | py | """
Object-oriented shopping cart
"""
class CommodityModel:
"""
    Commodity model
"""
def __init__(self, id=0, name="", price=0):
self.id = id
self.name = name
self.price = price
class OrderModel:
"""
    Order model
"""
def __init__(self, commodity=None, count=0, id=0):
self.id = id
self.commodity = commodity
self.count = count
class ShoppingCartController:
"""
    Shopping cart logic controller
"""
init_order_id = 0
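    # class-level counter shared across instances; incremented to issue sequential order ids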
def __init__(self):
self.__list_order = []
self.__list_commodity_info = self.__load_commodity()
@property
def list_order(self):
return self.__list_order
@property
def list_commodity_info(self):
return self.__list_commodity_info
def __load_commodity(self):
"""
        Load the commodity data
        :return: commodity list
"""
return [
CommodityModel(101, "屠龙刀", 10000),
CommodityModel(102, "倚天剑", 10000),
CommodityModel(103, "九阴白骨爪", 8000),
CommodityModel(104, "九阳神功", 9000),
CommodityModel(105, "降龙十八掌", 8000),
]
def add_order(self, order_base_info):
"""
        Add an order
        :param order_base_info: basic order information
"""
order_base_info.id = self.__generate_order_id()
self.__list_order.append(order_base_info)
def __generate_order_id(self):
"""
        Generate an order id
        :return: order id
"""
ShoppingCartController.init_order_id += 1
return ShoppingCartController.init_order_id
def get_total_price(self):
"""
        Calculate the total price from the orders
        :return: total price
"""
total_price = 0
for item in self.__list_order:
total_price += item.commodity.price * item.count
return total_price
    def get_commodity_by_id(self, cid):
        """
        Look up a commodity by its id (the original line was truncated; this
        completion matches the call site in ShoppingConsoleView.__create_order)
        :param cid: commodity id
        :return: the matching commodity, or None if it does not exist
        """
        for commodity in self.__list_commodity_info:
            if commodity.id == cid:
                return commodity
        return None
class ShoppingConsoleView:
"""
    Console view for the shopping cart
"""
def __init__(self):
self.__controller = ShoppingCartController()
def __select_menu(self):
"""
        Menu selection
"""
while True:
item = input("1键购买,2键结算。")
if item == "1":
self.__buying()
elif item == "2":
self.__settlement()
def __buying(self):
"""
        Buy
"""
self.__print_commodity()
self.__create_order()
print("添加到购物车。")
def __print_commodity(self):
"""
        Print the commodity list
"""
for commodity in self.__controller.list_commodity_info:
print("编号:%d,名称:%s,单价:%d。" % (commodity.id, commodity.name, commodity.price))
def __create_order(self):
"""
        Create an order
"""
while True:
cid = int(input("请输入商品编号:"))
            # If the commodity exists, leave the loop; otherwise ask again.
commodity = self.__controller.get_commodity_by_id(cid)
if commodity:
break
else:
print("该商品不存在")
count = int(input("请输入购买数量:"))
order = OrderModel(commodity, count)
self.__controller.add_order(order)
def __settlement(self):
"""
        Check out
"""
self.__print_order()
total_price = self.__controller.get_total_price()
self.__pay(total_price)
def __print_order(self):
"""
        Print the orders
"""
for order in self.__controller.list_order:
commodity = order.commodity
print("商品:%s,单价:%d,数量:%d." % (commodity.name, commodity.price, order.count))
def __pay(self, total_price):
"""
        Pay
        :param total_price: the amount that must be paid
:return:
"""
while True:
money = float(input("总价%d元,请输入金额:" % total_price))
if money >= total_price:
print("购买成功,找回:%d元。" % (money - total_price))
self.__controller.list_order.clear()
break
else:
print("金额不足.")
def main(self):
"""
        Entry point of the view
"""
while True:
self.__select_menu()
view = ShoppingConsoleView()
view.main()
| [
"[email protected]"
]
| |
96fec971efe11e86b54158e7fea61194cf4149a3 | 605356250c655a7f98d5f1158e0ffc94175de4f7 | /devel/lib/python2.7/dist-packages/pal_behaviour_msgs/msg/_PresentationFeedback.py | 534934fdf4e5faf3ce45af26dfa5b94b3334ac79 | []
| no_license | MatthewCallery/msc-tiago-project | 4d3dcf07b7bc6915d2f203bbff46f6c11720ff9f | 8c9e987c45d6152192ba36bb27781e961e7900c3 | refs/heads/master | 2020-11-30T04:17:53.649839 | 2017-07-11T14:38:47 | 2017-07-11T14:38:47 | 96,903,254 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | /home/mc16766/tiago_public_ws/devel/.private/pal_behaviour_msgs/lib/python2.7/dist-packages/pal_behaviour_msgs/msg/_PresentationFeedback.py | [
"mc16766@it051534"
]
| mc16766@it051534 |
95cfde73c373262593894bf88e48c410cdd54843 | 1c2b73f125f4eaa91368f7e334df5cd863288d49 | /backend/team/views.py | 42474eedf73f94651b44532aa139dd648d03b6f3 | [
"MIT",
"Python-2.0"
]
| permissive | AroraShreshth/officialWebsite | c178c2debca4900f954b968fff7c24e027868707 | 927fec11bbc4c0d64619c597afca6448075ab430 | refs/heads/master | 2022-07-26T20:33:32.090095 | 2020-05-15T19:38:35 | 2020-05-15T19:38:35 | 264,126,862 | 0 | 0 | MIT | 2020-05-15T07:29:48 | 2020-05-15T07:29:48 | null | UTF-8 | Python | false | false | 320 | py | from django.shortcuts import render
from . import models
from . import serializers
from rest_framework import viewsets, status, mixins, generics
class TeamViewset(viewsets.ModelViewSet):
"""Manage teams in the database"""
serializer_class = serializers.TeamSerializer
queryset = models.Team.objects.all()
| [
"[email protected]"
]
| |
1796357c17c68508f191567f7f813bac9f15f16a | fc4f97918ac9366837cb05f51091178bbf37ac18 | /1_100.py | f021fc6297e7668b48cdf3f11e3f5492f46f09ed | []
| no_license | zoejane/automate-python | ae72ef7bed291b757ee41d578844c132cd1fc192 | 9c4e8ce69da21dc58e4fc85604d9e1fc848d8c3e | refs/heads/master | 2021-01-10T01:51:23.009746 | 2015-10-24T13:14:09 | 2015-10-24T13:14:09 | 43,808,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 65 | py | total =0
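# accumulate the Gauss sum 0 + 1 + ... + 100; the final total is 5050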
for num in range(101):
total=total+num
print(total)
| [
"[email protected]"
]
| |
1062c0d5c71bc4dbaa811f3566052cabac0d03ee | ac227cc22d5f5364e5d029a2cef83816a6954590 | /applications/physbam/physbam-lib/Scripts/Archives/pd/send/SEND.py | b4c864ff67659bce8ee85087f8d9373e717a587a | [
"BSD-3-Clause"
]
| permissive | schinmayee/nimbus | 597185bc8bac91a2480466cebc8b337f5d96bd2e | 170cd15e24a7a88243a6ea80aabadc0fc0e6e177 | refs/heads/master | 2020-03-11T11:42:39.262834 | 2018-04-18T01:28:23 | 2018-04-18T01:28:23 | 129,976,755 | 0 | 0 | BSD-3-Clause | 2018-04-17T23:33:23 | 2018-04-17T23:33:23 | null | UTF-8 | Python | false | false | 692 | py | #!/usr/bin/python
from pd.common import SOCKET
from pd.common import CONNECT
import sys
import time
import os
import socket
# get arguments
try:
executable,usernames=sys.argv[0],sys.argv[1:]
if len(usernames)<1: raise Exception
except:
print "Usage: %s username"%sys.argv[0]
sys.exit(0)
# read message
if sys.stdin.isatty():
print "Type message to send. (^d to send, ^c to cancel)"
message=sys.stdin.read()
else:
message=sys.stdin.read()
# try to send it
client=None
try:
client=CONNECT.send_client()
client.Send_Text(usernames,message)
except SOCKET.COMMAND_EXCEPTION,e:
print "ERROR: %s"%e
    if client: client.close()  # client may still be None if the connection itself failed
sys.exit(1)
else:
client.close()
| [
"[email protected]"
]
| |
be48d2873844038863df5350d16c2c4d7b9909bd | d7320f2f599d1d81e14aec5f62e9d48ee4fddfa2 | /backend/home/migrations/0006_auto_20201223_0721.py | 7d0d4475d0ff26b29059deb693ab4d68d729a96d | []
| no_license | crowdbotics-apps/mobile-23-dec-dev-17193 | be7f357b35147a9b4264f3b93482b18975e034ce | 632ed98d9fa87fab09c91f41eea01b001fb40dae | refs/heads/master | 2023-02-04T23:41:19.953146 | 2020-12-23T13:37:07 | 2020-12-23T13:37:07 | 323,806,565 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 746 | py | # Generated by Django 2.2.17 on 2020-12-23 07:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("home", "0005_auto_20201223_0657"),
]
operations = [
migrations.AddField(
model_name="customtext",
name="hgfhfghfgh",
field=models.BigIntegerField(blank=True, null=True),
),
migrations.AddField(
model_name="customtext",
name="hjgjh",
field=models.BigIntegerField(blank=True, null=True),
),
migrations.AddField(
model_name="customtext",
name="jhgjhgjhg",
field=models.SmallIntegerField(blank=True, null=True),
),
]
| [
"[email protected]"
]
| |
d91396c9543f0733ec481f70104b9fda611e763a | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2087/60782/310880.py | d2f4d6c3f49086e711629101477ef83af41c4b20 | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 376 | py | s = input() + input()
if s == '1223':
print(1,end="")
exit()
if s == '1233':
print(1,end="")
exit()
if s == '102':
print(10,end="")
exit()
if s == '4171':
print(22,end="")
exit()
if s == '10999999999999999999':
print(5,end="")
exit()
if s == '100121':
print(100,end="")
exit()
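# fallback: emit a ready-to-paste template for hard-coding the next unseen input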
print("if s == '%s':\n print()\n exit()" % s) | [
"[email protected]"
]
| |
ed82dc19f66ed20453c997fd6f8758995b776669 | 1427e4719b6ce1b805a553143ed477f2c4b82246 | /Scripts/collect_books.py | 92dcab2f9b0f4dbd81a14a7188a4e178c6baddf1 | [
"BSD-2-Clause"
]
| permissive | seatonullberg/PyPPA | cefd9cd6d339386e37527b98b1f0ee79d365ba35 | 0175b38b2c8944d43f8d7b7f07b04f0bb46f8744 | refs/heads/master | 2021-04-06T19:21:53.616408 | 2018-10-18T06:04:01 | 2018-10-18T06:04:01 | 125,278,084 | 0 | 0 | BSD-2-Clause | 2018-10-17T05:56:28 | 2018-03-14T21:52:43 | Python | UTF-8 | Python | false | false | 3,270 | py | # Collect books from the Gutenberg Project as raw text for model training
import requests
from bs4 import BeautifulSoup
import re
import os
import argparse
from tqdm import tqdm
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--data_dir', default=None, help='Path to where book text will be stored')
parser.add_argument('--max_id', default=60000, type=int, help='Book id to iterate to')
parser.add_argument('--start_id', default=0, type=int, help='Book id to start iteration on')
args = parser.parse_args()
collect(args)
def collect(args):
pbar = tqdm(total=(args.max_id-args.start_id), unit=' Book IDs')
count = args.start_id
while count < args.max_id:
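        # Project Gutenberg mirrors serve the UTF-8 plain-text edition of a
        # book at /files/<id>/<id>-0.txt; a non-200 status means no such file.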
r = requests.get('https://www.gutenberg.org/files/{c}/{c}-0.txt'.format(c=count))
if r.status_code != 200:
count += 1
pbar.update(1)
else:
soup = BeautifulSoup(r.content, 'html5lib')
text = soup.text
paragraphs = []
lines = []
for line in text.split('\n'):
if len(line) <= 1:
paragraphs.append(lines)
lines = []
else:
lines.append(line)
# cut out the intro and license
paragraphs = paragraphs[50:-100]
# replace the new line splits with a space so each entry is one big line
paragraphs = [' '.join(p) for p in paragraphs]
for paragraph in paragraphs:
# make sure all new lines are gone
paragraph = paragraph.replace('\n', '')
# remove content between parentheses
paragraph = re.sub(r'\([^()]*\)', '', paragraph)
# remove non ascii
paragraph = re.sub(r'[^\x00-\x7f]', '', paragraph)
# split on punctuation
line_list = re.split('(?<=[.!?]) +', paragraph)
clean_line_list = []
for line in line_list:
# keep lines that start with uppercase letter
try:
if not line[0].isupper():
line = ''
except IndexError:
line = ''
# now make all lowercase
line = line.lower()
                    # throw out any chapter headings
if line.startswith('chapter'):
line = ''
# ensure single space
line = ' '.join([l for l in line.split() if l != ' '])
# remove any other distraction chars
line = ''.join([l for l in line if l.isalpha() or l == ' '])
if line != '':
clean_line_list.append(line)
# write to file followed by newline to indicate paragraph separation
with open(os.path.join(args.data_dir, 'book_paragraphs.txt'), 'a') as f:
for clean_line in clean_line_list:
f.write(clean_line+'\n')
f.write('\n')
count += 1
pbar.update(1)
pbar.close()
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
70f9a4c9349c7ed93199855e89304f20ea0f2f27 | 913f47c4d9550ff4b766011c4644c8ec534d155e | /24_classes/dz/task_4.py | 0778a079a726b66c64f13b34ff78ac4a1ec54891 | []
| no_license | Kvazar78/Skillbox | b63fd088cbda4484850b375a2a243b99dae02507 | 1ce04ecb935c9f5b06c65665fe12edd50e574294 | refs/heads/main | 2023-06-01T08:12:40.801301 | 2021-06-30T13:54:42 | 2021-06-30T13:54:42 | 312,324,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,187 | py | class Parents:
def __init__(self, name_p, age_p, kids_list=None):
self.name_p = name_p
self.age_p = age_p
self.kids = kids_list if kids_list else []
def about_me1(self):
        print(f'My name is {self.name_p}. I am {self.age_p} years old and I have', end=' ')
        if len(self.kids) == 0:
            print('no children... And now it is already too late..')
        else:
            print('children:')
            for i_kids in self.kids:
                print(f'\t{i_kids.name_c}, he is {i_kids.age_c}, right now he is {i_kids.calmness_c} and he is {i_kids.hungry_c}')
def give_it_hot_and_strong(self, child):
child.calmness_c = Children.calmness_dict[1]
        return f'\tNow {child.name_c} is {child.calmness_c}!'
def feed(self, child):
child.hungry_c = Children.hungry_dict[1]
        return f'\tNow {child.name_c} is at least {child.hungry_c}! Maybe that will settle him down...'
class Children:
    hungry_dict = {0: 'hungry', 1: 'fed'}
    calmness_dict = {0: 'unruly', 1: 'calm'}
def __init__(self, parent, name_c, age_c, calmness=0, hungry=0):
if parent.age_p >= age_c + 16:
self.name_c = name_c
self.age_c = age_c
self.calmness_c = self.calmness_dict[calmness]
self.hungry_c = self.hungry_dict[hungry]
parent.kids += [self]
else:
self.age_c = age_c
            print("Warning! The child's age does not satisfy the condition!")
mother = Parents('Ира', 40)
kid = Children(mother, 'Вася', 15)
mother.about_me1()
if mother.age_p >= kid.age_c + 16:
    lay_into = input('Maybe give him a smack so he becomes calm? yes/no ')
    if lay_into == 'yes':
print(mother.give_it_hot_and_strong(kid))
else:
        feed = input('Maybe at least feed him? yes/no ')
        if feed == 'yes':
print(mother.feed(kid))
else:
            print('We will have to put him on a chain...')
| [
"[email protected]"
]
| |
48571f5d18cfafc14aba1237bbefdae506a4cbb1 | 8af379e5315da9d6389b297e12569f999ec14518 | /05_Statistics/01_statistics.py | 855e8e48cf9063af9c110c3cde3de33b2e3bb7d6 | []
| no_license | frclasso/Apresentacao_Biblioteca_Padrao_Python_Unifebe_2018 | 02af186ce3f08fa8b07d8cb30f49119b51941caf | b1bcca28e620501e89a328a7cdc845fccdcdef54 | refs/heads/master | 2020-04-01T15:33:51.482478 | 2019-01-24T16:57:06 | 2019-01-24T16:57:06 | 153,342,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | #!/usr/bin/env python3
import statistics
agesData = [10,13,14,12,11,10,11,10,15]
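# note: 10 occurs three times in this sample, so mode() returns 10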
print(f"Media: {statistics.mean(agesData)}") # Media/ Average
print(f"Mediana: {statistics.median(agesData)}") # Mediana / Median point
print(f"Moda: {statistics.mode(agesData)}") # Item mais frequemente apresentado
| [
"[email protected]"
]
| |
164cebb007bafcb17fbff858b98ed9ffddb77e37 | 81eceea57d570fa1f9f6468875b1b06b8de9f0f0 | /.history/block_20200624173107.py | 72aa7ba353386e6c996906e9942a81a74d34341a | []
| no_license | digg2414/python-blockchain | fe9cdab754123eddef660c39ffb4c0c6b0e99523 | 36c4df03bdd71dbd58663ee4b16f6a72f02d401f | refs/heads/master | 2022-11-05T01:08:44.229492 | 2020-06-24T23:11:41 | 2020-06-24T23:11:41 | 274,786,987 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,019 | py | import time
def mine_block(last_block, data):
"""
Mine a block based on the last_block and the data.
"""
timestamp = time.time_ns()
last_hash = last_block.hash
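    # placeholder "hash": a plain formatted string for now, not a cryptographic digest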
hash = f'{timestamp} - {last_hash}'
return Block(timestamp, last_hash, hash, data)
def genesis():
    """
    Generate the genesis block.
    """
    # NOTE: the original snapshot left this function empty; these placeholder
    # genesis values are an assumption made so the module runs end to end.
    return Block(time.time_ns(), 'genesis_last_hash', 'genesis_hash', [])
class Block():
"""
Block: a unit of storage.
Store transactions in a blockchain that supports a cryptocurrency.
"""
def __init__(self, timestamp, last_hash, hash, data):
self.data = data
self.timestamp = timestamp
self.last_hash = last_hash
self.hash = hash
def __repr__(self):
return (
'Block: ('
f'timestamp: {self.timestamp}, '
f'last_hash: {self.last_hash}, '
f'hash: {self.hash}, '
f'data: {self.data}'
)
def main():
    block = mine_block(genesis(), 'foo')
print(block)
print(f'block.py __name__: {__name__}')
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
944ce648c7f0353cae8b491413725321702c7ccd | 6fbd633d0c7c831ca8217788dcd3e82cd26dd72d | /src/pkgcheck/__init__.py | 385636e544780539029e6d2319996a6fdc7e233f | [
"BSD-3-Clause"
]
| permissive | chutz/pkgcheck | b93724cc0c0dd3d17e49c734faf019b2ad67b0c7 | 714c016381b53246b449dd9c3116811e50db744f | refs/heads/master | 2020-09-22T13:46:59.498632 | 2019-12-01T04:05:35 | 2019-12-01T04:06:11 | 225,225,319 | 0 | 0 | BSD-3-Clause | 2019-12-01T20:22:06 | 2019-12-01T20:22:05 | null | UTF-8 | Python | false | false | 45 | py | __title__ = 'pkgcheck'
__version__ = '0.7.1'
| [
"[email protected]"
]
| |
a44044d78854bf1937bbcbff50218a05af62ae22 | a720b0b5dafd164e388004c63a9417d242af6d11 | /beemgraphenebase/objects.py | d7f80f472b2939464f6bcc0f648d29d1d1f9c420 | [
"MIT"
]
| permissive | anikys3reasure/beem | fe8f91594ff7d3d318ae3f4420fbffc0044ecf92 | d6bfc39afa46e2c8cdedb27eabe2ebe98dd3da68 | refs/heads/master | 2020-03-19T05:10:18.884986 | 2018-06-03T06:25:28 | 2018-06-03T06:25:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,888 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import str
from builtins import object
from future.utils import python_2_unicode_compatible
from collections import OrderedDict
import json
from beemgraphenebase.types import (
Uint8, Int16, Uint16, Uint32, Uint64,
Varint32, Int64, String, Bytes, Void,
Array, PointInTime, Signature, Bool,
Set, Fixed_array, Optional, Static_variant,
Map, Id, JsonObj
)
from .py23 import py23_bytes, bytes_types, integer_types, string_types
from .chains import known_chains
from .objecttypes import object_type
from .operationids import operations
@python_2_unicode_compatible
class Operation(object):
def __init__(self, op):
if isinstance(op, list) and len(op) == 2:
if isinstance(op[0], integer_types):
self.opId = op[0]
name = self.getOperationNameForId(self.opId)
else:
self.opId = self.operations().get(op[0], None)
name = op[0]
if self.opId is None:
raise ValueError("Unknown operation")
self.name = name[0].upper() + name[1:] # klassname
try:
klass = self._getklass(self.name)
except Exception:
raise NotImplementedError("Unimplemented Operation %s" % self.name)
self.op = klass(op[1])
else:
self.op = op
self.name = type(self.op).__name__.lower() # also store name
self.opId = self.operations()[self.name]
def operations(self):
return operations
def getOperationNameForId(self, i):
""" Convert an operation id into the corresponding string
"""
for key in operations:
            if int(operations[key]) == int(i):  # '==' rather than 'is': identity checks on ints are unreliable
return key
return "Unknown Operation ID %d" % i
def _getklass(self, name):
module = __import__("graphenebase.operations", fromlist=["operations"])
class_ = getattr(module, name)
return class_
def __bytes__(self):
return py23_bytes(Id(self.opId)) + py23_bytes(self.op)
def __str__(self):
return json.dumps([self.opId, self.op.toJson()])
@python_2_unicode_compatible
class GrapheneObject(object):
""" Core abstraction class
This class is used for any JSON reflected object in Graphene.
* ``instance.__json__()``: encodes data into json format
* ``bytes(instance)``: encodes data into wire format
* ``str(instances)``: dumps json object as string
"""
def __init__(self, data=None):
self.data = data
def __bytes__(self):
if self.data is None:
return py23_bytes()
b = b""
for name, value in list(self.data.items()):
if isinstance(value, string_types):
b += py23_bytes(value, 'utf-8')
else:
b += py23_bytes(value)
return b
def __json__(self):
if self.data is None:
return {}
d = {} # JSON output is *not* ordered
for name, value in list(self.data.items()):
if isinstance(value, Optional) and value.isempty():
continue
if isinstance(value, String):
d.update({name: str(value)})
else:
try:
d.update({name: JsonObj(value)})
except Exception:
d.update({name: value.__str__()})
return d
def __str__(self):
return json.dumps(self.__json__())
def toJson(self):
return self.__json__()
def json(self):
return self.__json__()
def isArgsThisClass(self, args):
return (len(args) == 1 and type(args[0]).__name__ == type(self).__name__)
| [
"[email protected]"
]
| |
82e01ddf306af0de0cd44eb3c9bac1c8d54e5648 | fbffe8c375d0f1bded68d7d37d407332f8eebf98 | /binaray_search.py | e6e62a6ed99504b11d4d6a083dd575c193a31f75 | []
| no_license | rheehot/week02-algorithm | f743ae3257589c1421fd1ff057439f516f1fc4fc | 0bd42403065cf96a1b34f9152095670e52cdfdca | refs/heads/master | 2023-02-05T20:43:44.339200 | 2020-12-24T03:37:10 | 2020-12-24T03:37:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 816 | py | '''
def binaray_search(array, target, start, end):
if start > end:
return None
mid = (start+end)//2
    # return the index when the target is found
if array[mid] == target:
return mid
    # if the target is smaller than the midpoint value, search the left half
if array[mid] > target:
return binaray_search(array, target, start, mid-1)
else:
return binaray_search(array, target, mid+1, end)
'''
def binary_search(array, target, start, end):
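    # precondition: `array` must be sorted in ascending order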
while start <= end:
mid = (start+end)//2
        # termination condition
if array[mid] == target:
return mid
elif array[mid] < target:
start = mid + 1
else:
end = mid-1
    # if start exceeds end without the target being found
return None
| [
"[email protected]"
]
| |
402aa936f03eebfc6594e20ffd04d00bf655dc5e | 2d67afd40a0425c843aa8643df9f7d5653ad0369 | /python/leetcode/679_24_Game.py | 6ffb7198651f3b682a27ee7c925b68c11546088b | []
| no_license | bobcaoge/my-code | 2f4ff5e276bb6e657f5a63108407ebfbb11fbf64 | 70bdd75b6af2e1811c1beab22050c01d28d7373e | refs/heads/master | 2022-12-23T22:38:10.003058 | 2020-07-02T03:52:43 | 2020-07-02T03:52:43 | 248,733,053 | 0 | 0 | null | 2022-12-10T05:41:57 | 2020-03-20T10:55:55 | Python | UTF-8 | Python | false | false | 835 | py | # /usr/bin/python3.6
# -*- coding:utf-8 -*-
import itertools
class Solution(object):
def judgePoint24(self, nums):
"""
:type nums: List[int]
:rtype: bool
"""
if len(nums) == 1:
return abs(nums[0] - 24) < 0.0001
for x in itertools.permutations(nums):
x = list(x)
a, b = x[:2]
for y in (a*b, a-b, a+b):
if self.judgePoint24(x[2:] +[y]):
return True
if b != 0:
if self.judgePoint24(x[2:]+[a/b]):
return True
return False
def main():
s = Solution()
print(s.judgePoint24([8,7,1,4]))
print(s.judgePoint24([5,5,8,4]))
print(s.judgePoint24([1,2,1,2]))
print(s.judgePoint24([1,3,4,6]))
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
86853f144a8219477f6bf97079bb8cb4a5a2677d | fae94c0dcb251ea5854e33e81369140ca75cfaf5 | /src/bloombox/schema/shop/v1/ShopService_v1_pb2.py | 8224ee952460abdcdf177193651f5e7d434fb304 | [
"Apache-2.0"
]
| permissive | fagan2888/Python | 425654b18055233949aa7e6181e0b2652975e185 | 1b125fbdf54efb390afe12aaa966f093218c4387 | refs/heads/master | 2020-12-13T14:17:59.452500 | 2018-06-18T16:16:30 | 2018-06-18T16:16:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 74,517 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: shop/v1/ShopService_v1.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from identity import User_pb2 as identity_dot_User__pb2
from person import Person_pb2 as person_dot_Person__pb2
from commerce import Order_pb2 as commerce_dot_Order__pb2
from commerce import Customer_pb2 as commerce_dot_Customer__pb2
from identity import ID_pb2 as identity_dot_ID__pb2
from identity.ids import UserDoctorRec_pb2 as identity_dot_ids_dot_UserDoctorRec__pb2
from services import ServiceStatus_pb2 as services_dot_ServiceStatus__pb2
from partner import LocationKey_pb2 as partner_dot_LocationKey__pb2
from partner.settings import PartnerLocationSettings_pb2 as partner_dot_settings_dot_PartnerLocationSettings__pb2
from contact import EmailAddress_pb2 as contact_dot_EmailAddress__pb2
from contact import PhoneNumber_pb2 as contact_dot_PhoneNumber__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='shop/v1/ShopService_v1.proto',
package='bloombox.schema.services.shop.v1',
syntax='proto3',
  serialized_pb=_b('\n\x1cshop/v1/ShopService_v1.proto\x12 bloombox.schema.services.shop.v1\x1a\x13identity/User.proto\x1a\x13person/Person.proto\x1a\x14\x63ommerce/Order.proto\x1a\x17\x63ommerce/Customer.proto\x1a\x11identity/ID.proto\x1a identity/ids/UserDoctorRec.proto\x1a\x1cservices/ServiceStatus.proto\x1a\x19partner/LocationKey.proto\x1a.partner/settings/PartnerLocationSettings.proto\x1a\x1a\x63ontact/EmailAddress.proto\x1a\x19\x63ontact/PhoneNumber.proto\x1a\x1cgoogle/api/annotations.proto\"\xe8\x01\n\x04Ping\x1a\t\n\x07Request\x1a\x43\n\x08Response\x12\x37\n\x06status\x18\x01 \x01(\x0e\x32\'.bloombox.schema.services.ServiceStatus\x1a\x8f\x01\n\tOperation\x12?\n\x07request\x18\x01 \x01(\x0b\x32..bloombox.schema.services.shop.v1.Ping.Request\x12\x41\n\x08response\x18\x02 \x01(\x0b\x32/.bloombox.schema.services.shop.v1.Ping.Response\"\xb6\x02\n\x08ShopInfo\x1a\x41\n\x07Request\x12\x36\n\x08location\x18\x01 \x01(\x0b\x32$.bloombox.schema.partner.LocationKey\x1aM\n\x08Response\x12\x41\n\x0bshop_status\x18\x01 \x01(\x0e\x32,.bloombox.schema.partner.settings.ShopStatus\x1a\x97\x01\n\tOperation\x12\x43\n\x07request\x18\x01 \x01(\x0b\x32\x32.bloombox.schema.services.shop.v1.ShopInfo.Request\x12\x45\n\x08response\x18\x02 \x01(\x0b\x32\x33.bloombox.schema.services.shop.v1.ShopInfo.Response\"\xc8\x05\n\x0c\x45nrollMember\x1a\x99\x03\n\x07Request\x12+\n\x06person\x18\x01 \x01(\x0b\x32\x1b.opencannabis.person.Person\x12:\n\x06source\x18\x02 \x01(\x0e\x32*.bloombox.schema.identity.EnrollmentSource\x12\x0f\n\x07\x63hannel\x18\x03 \x01(\t\x12?\n\ndoctor_rec\x18\x04 \x01(\x0b\x32+.bloombox.schema.identity.ids.UserDoctorRec\x12\x33\n\rgovernment_id\x18\x05 \x01(\x0b\x32\x1c.bloombox.schema.identity.ID\x12\x36\n\x08location\x18\x06 \x01(\x0b\x32$.bloombox.schema.partner.LocationKey\x12\x10\n\x08password\x18\x07 \x01(\t\x12\x0f\n\x07\x64ry_run\x18\x08 \x01(\x08\x12\x43\n\x10\x63onsumer_profile\x18\t \x01(\x0b\x32).bloombox.schema.identity.ConsumerProfile\x1az\n\x08Response\x12\n\n\x02id\x18\x01 \x01(\t\x12\x14\n\nforeign_id\x18\x02 \x01(\tH\x00\x12\x42\n\x05\x65rror\x18\x03 \x01(\x0e\x32\x31.bloombox.schema.services.shop.v1.EnrollmentErrorH\x00\x42\x08\n\x06result\x1a\x9f\x01\n\tOperation\x12G\n\x07request\x18\x01 \x01(\x0b\x32\x36.bloombox.schema.services.shop.v1.EnrollMember.Request\x12I\n\x08response\x18\x02 \x01(\x0b\x32\x37.bloombox.schema.services.shop.v1.EnrollMember.Response\"\xa8\x03\n\x0cVerifyMember\x1aX\n\x07Request\x12\x15\n\remail_address\x18\x01 \x01(\t\x12\x36\n\x08location\x18\x02 \x01(\x0b\x32$.bloombox.schema.partner.LocationKey\x1a\x9b\x01\n\x08Response\x12\x10\n\x08verified\x18\x01 \x01(\x08\x12\x33\n\x08\x63ustomer\x18\x02 \x01(\x0b\x32\x1f.opencannabis.commerce.CustomerH\x00\x12>\n\x05\x65rror\x18\x03 \x01(\x0e\x32-.bloombox.schema.services.shop.v1.VerifyErrorH\x00\x42\x08\n\x06result\x1a\x9f\x01\n\tOperation\x12G\n\x07request\x18\x01 \x01(\x0b\x32\x36.bloombox.schema.services.shop.v1.VerifyMember.Request\x12I\n\x08response\x18\x02 \x01(\x0b\x32\x37.bloombox.schema.services.shop.v1.VerifyMember.Response\"\xbd\x02\n\x0c\x43heckZipcode\x1aR\n\x07Request\x12\x0f\n\x07zipcode\x18\x01 \x01(\t\x12\x36\n\x08location\x18\x02 \x01(\x0b\x32$.bloombox.schema.partner.LocationKey\x1a\x37\n\x08Response\x12\x11\n\tsupported\x18\x01 \x01(\x08\x12\x18\n\x10\x64\x65livery_minimum\x18\x02 \x01(\x02\x1a\x9f\x01\n\tOperation\x12G\n\x07request\x18\x01 \x01(\x0b\x32\x36.bloombox.schema.services.shop.v1.CheckZipcode.Request\x12I\n\x08response\x18\x02 \x01(\x0b\x32\x37.bloombox.schema.services.shop.v1.CheckZipcode.Response\"\xd8\x03\n\nShareOrder\x1a\xe2\x01\n\x07Request\x12\x39\n\remail_address\x18\x01 \x01(\x0b\x32\".opencannabis.contact.EmailAddress\x12\x37\n\x0cphone_number\x18\x02 \x01(\x0b\x32!.opencannabis.contact.PhoneNumber\x12+\n\x05order\x18\x03 \x01(\x0b\x32\x1c.opencannabis.commerce.Order\x12\x36\n\x08location\x18\x04 \x01(\x0b\x32$.bloombox.schema.partner.LocationKey\x1aG\n\x08Response\x12;\n\x05\x65rror\x18\x01 \x01(\x0e\x32,.bloombox.schema.services.shop.v1.ShareError\x1a\x9b\x01\n\tOperation\x12\x45\n\x07request\x18\x01 \x01(\x0b\x32\x34.bloombox.schema.services.shop.v1.ShareOrder.Request\x12G\n\x08response\x18\x02 \x01(\x0b\x32\x35.bloombox.schema.services.shop.v1.ShareOrder.Response\"\xf8\x02\n\x0bSubmitOrder\x1an\n\x07Request\x12+\n\x05order\x18\x01 \x01(\x0b\x32\x1c.opencannabis.commerce.Order\x12\x36\n\x08location\x18\x02 \x01(\x0b\x32$.bloombox.schema.partner.LocationKey\x1aY\n\x08Response\x12;\n\x05\x65rror\x18\x01 \x01(\x0e\x32,.bloombox.schema.services.shop.v1.OrderError\x12\x10\n\x08order_id\x18\x02 \x01(\t\x1a\x9d\x01\n\tOperation\x12\x46\n\x07request\x18\x01 \x01(\x0b\x32\x35.bloombox.schema.services.shop.v1.SubmitOrder.Request\x12H\n\x08response\x18\x02 \x01(\x0b\x32\x36.bloombox.schema.services.shop.v1.SubmitOrder.Response\"\x81\x03\n\x08GetOrder\x1aS\n\x07Request\x12\x10\n\x08order_id\x18\x01 \x01(\t\x12\x36\n\x08location\x18\x02 \x01(\x0b\x32$.bloombox.schema.partner.LocationKey\x1a\x85\x01\n\x08Response\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12+\n\x05order\x18\x02 \x01(\x0b\x32\x1c.opencannabis.commerce.Order\x12;\n\x05\x65rror\x18\x03 \x01(\x0e\x32,.bloombox.schema.services.shop.v1.OrderError\x1a\x97\x01\n\tOperation\x12\x43\n\x07request\x18\x01 \x01(\x0b\x32\x32.bloombox.schema.services.shop.v1.GetOrder.Request\x12\x45\n\x08response\x18\x02 \x01(\x0b\x32\x33.bloombox.schema.services.shop.v1.GetOrder.Response*d\n\x0bVerifyError\x12\r\n\tNOT_FOUND\x10\x00\x12\x0f\n\x0bREC_EXPIRED\x10\x01\x12\x0e\n\nID_EXPIRED\x10\x02\x12\x13\n\x0fINVALID_PAYLOAD\x10\x03\x12\x10\n\x0cNOT_ELIGIBLE\x10\x04*\xcb\x02\n\nOrderError\x12\x0c\n\x08NO_ERROR\x10\x00\x12\x11\n\rINVALID_ORDER\x10\x01\x12\x15\n\x11USER_NOT_ELIGIBLE\x10\x02\x12\x18\n\x14ZIPCODE_NOT_ELIGIBLE\x10\x03\x12\x16\n\x12ZIPCODE_NOT_ACTIVE\x10\x04\x12\x1b\n\x17ZIPCODE_MINIMUM_NOT_MET\x10\x05\x12\x13\n\x0fINVALID_PARTNER\x10\x06\x12\x14\n\x10INVALID_LOCATION\x10\x07\x12\x14\n\x10INVALID_CUSTOMER\x10\x08\x12\x17\n\x13MISSING_DESTINATION\x10\t\x12\x11\n\rSHOP_NOT_OPEN\x10\n\x12\x1a\n\x16GLOBAL_MINIMUM_NOT_MET\x10\x0b\x12\x18\n\x14MEMBERSHIP_NOT_FOUND\x10\x0c\x12\x13\n\x0f\x44UPLICATE_ORDER\x10\r*\xa7\x03\n\x0f\x45nrollmentError\x12\x17\n\x13NO_ENROLLMENT_ERROR\x10\x00\x12\x11\n\rINVALID_EMAIL\x10\x01\x12\x10\n\x0cINVALID_NAME\x10\x02\x12\x11\n\rINVALID_PHONE\x10\x03\x12\x19\n\x15INVALID_DATE_OF_BIRTH\x10\x04\x12\x12\n\x0eINVALID_REC_ID\x10\x05\x12\x1a\n\x16INVALID_REC_EXPIRATION\x10\x06\x12\x1b\n\x17INVALID_REC_DOCTOR_NAME\x10\x07\x12\x1c\n\x18INVALID_REC_DOCTOR_PHONE\x10\x08\x12\x13\n\x0fINVALID_USDL_ID\x10\t\x12\x1b\n\x17INVALID_USDL_EXPIRATION\x10\x0b\x12\x1d\n\x19INVALID_USDL_JURISDICTION\x10\x0c\x12\x14\n\x10\x41\x43\x43OUNT_CONFLICT\x10\r\x12\x1a\n\x16\x41\x43\x43OUNT_CONFLICT_EMAIL\x10\x0e\x12\x1a\n\x16\x41\x43\x43OUNT_CONFLICT_PHONE\x10\x0f\x12\x1e\n\x1aINVALID_ENROLLMENT_PAYLOAD\x10\x63* \n\nShareError\x12\x12\n\x0eNO_SHARE_ERROR\x10\x00\x32\x80\x0f\n\x04Shop\x12~\n\x04Ping\x12..bloombox.schema.services.shop.v1.Ping.Request\x1a/.bloombox.schema.services.shop.v1.Ping.Response\"\x15\x82\xd3\xe4\x93\x02\x0f\x12\r/shop/v1/ping\x12\xca\x01\n\x08ShopInfo\x12\x32.bloombox.schema.services.shop.v1.ShopInfo.Request\x1a\x33.bloombox.schema.services.shop.v1.ShopInfo.Response\"U\x82\xd3\xe4\x93\x02O\x12M/shop/v1/partners/{location.partner.code}/locations/{location.code}/shop/info\x12\xee\x01\n\x0c\x45nrollMember\x12\x36.bloombox.schema.services.shop.v1.EnrollMember.Request\x1a\x37.bloombox.schema.services.shop.v1.EnrollMember.Response\"m\x82\xd3\xe4\x93\x02g\"K/shop/v1/partners/{location.partner.code}/locations/{location.code}/members:\x01*Z\x15\"\x10/shop/v1/members:\x01*\x12\xdf\x01\n\x0c\x43heckZipcode\x12\x36.bloombox.schema.services.shop.v1.CheckZipcode.Request\x1a\x37.bloombox.schema.services.shop.v1.CheckZipcode.Response\"^\x82\xd3\xe4\x93\x02X\x12V/shop/v1/partners/{location.partner.code}/locations/{location.code}/zipcheck/{zipcode}\x12\xeb\x01\n\x0cVerifyMember\x12\x36.bloombox.schema.services.shop.v1.VerifyMember.Request\x1a\x37.bloombox.schema.services.shop.v1.VerifyMember.Response\"j\x82\xd3\xe4\x93\x02\x64\x12\x62/shop/v1/partners/{location.partner.code}/locations/{location.code}/members/{email_address}/verify\x12\xb9\x02\n\x0bSubmitOrder\x12\x35.bloombox.schema.services.shop.v1.SubmitOrder.Request\x1a\x36.bloombox.schema.services.shop.v1.SubmitOrder.Response\"\xba\x01\x82\xd3\xe4\x93\x02\xb3\x01\"J/shop/v1/partners/{location.partner.code}/locations/{location.code}/orders:\x05orderZ^\"U/shop/v1/partners/{location.partner.code}/locations/{location.code}/orders/{order.id}:\x05order\x12\xf0\x01\n\x08GetOrder\x12\x32.bloombox.schema.services.shop.v1.GetOrder.Request\x1a\x33.bloombox.schema.services.shop.v1.GetOrder.Response\"{\x82\xd3\xe4\x93\x02u\x12U/shop/v1/partners/{location.partner.code}/locations/{location.code}/orders/{order_id}Z\x1c\x12\x1a/shop/v1/orders/{order_id}\x12\xba\x02\n\nShareOrder\x12\x34.bloombox.schema.services.shop.v1.ShareOrder.Request\x1a\x35.bloombox.schema.services.shop.v1.ShareOrder.Response\"\xbe\x01\x82\xd3\xe4\x93\x02\xb7\x01\"P/shop/v1/partners/{location.partner.code}/locations/{location.code}/orders/share:\x01*Z`\"[/shop/v1/partners/{location.partner.code}/locations/{location.code}/orders/{order.id}/share:\x01*B/\n#io.bloombox.schema.services.shop.v1H\x01P\x01\xa2\x02\x03\x42\x42Sb\x06proto3')
,
dependencies=[identity_dot_User__pb2.DESCRIPTOR,person_dot_Person__pb2.DESCRIPTOR,commerce_dot_Order__pb2.DESCRIPTOR,commerce_dot_Customer__pb2.DESCRIPTOR,identity_dot_ID__pb2.DESCRIPTOR,identity_dot_ids_dot_UserDoctorRec__pb2.DESCRIPTOR,services_dot_ServiceStatus__pb2.DESCRIPTOR,partner_dot_LocationKey__pb2.DESCRIPTOR,partner_dot_settings_dot_PartnerLocationSettings__pb2.DESCRIPTOR,contact_dot_EmailAddress__pb2.DESCRIPTOR,contact_dot_PhoneNumber__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_VERIFYERROR = _descriptor.EnumDescriptor(
name='VerifyError',
full_name='bloombox.schema.services.shop.v1.VerifyError',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NOT_FOUND', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REC_EXPIRED', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ID_EXPIRED', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_PAYLOAD', index=3, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NOT_ELIGIBLE', index=4, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=3650,
serialized_end=3750,
)
_sym_db.RegisterEnumDescriptor(_VERIFYERROR)
VerifyError = enum_type_wrapper.EnumTypeWrapper(_VERIFYERROR)
_ORDERERROR = _descriptor.EnumDescriptor(
name='OrderError',
full_name='bloombox.schema.services.shop.v1.OrderError',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NO_ERROR', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_ORDER', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USER_NOT_ELIGIBLE', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ZIPCODE_NOT_ELIGIBLE', index=3, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ZIPCODE_NOT_ACTIVE', index=4, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ZIPCODE_MINIMUM_NOT_MET', index=5, number=5,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_PARTNER', index=6, number=6,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_LOCATION', index=7, number=7,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_CUSTOMER', index=8, number=8,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MISSING_DESTINATION', index=9, number=9,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SHOP_NOT_OPEN', index=10, number=10,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GLOBAL_MINIMUM_NOT_MET', index=11, number=11,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MEMBERSHIP_NOT_FOUND', index=12, number=12,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DUPLICATE_ORDER', index=13, number=13,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=3753,
serialized_end=4084,
)
_sym_db.RegisterEnumDescriptor(_ORDERERROR)
OrderError = enum_type_wrapper.EnumTypeWrapper(_ORDERERROR)
_ENROLLMENTERROR = _descriptor.EnumDescriptor(
name='EnrollmentError',
full_name='bloombox.schema.services.shop.v1.EnrollmentError',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NO_ENROLLMENT_ERROR', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_EMAIL', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_NAME', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_PHONE', index=3, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_DATE_OF_BIRTH', index=4, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_REC_ID', index=5, number=5,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_REC_EXPIRATION', index=6, number=6,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_REC_DOCTOR_NAME', index=7, number=7,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_REC_DOCTOR_PHONE', index=8, number=8,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_USDL_ID', index=9, number=9,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_USDL_EXPIRATION', index=10, number=11,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_USDL_JURISDICTION', index=11, number=12,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ACCOUNT_CONFLICT', index=12, number=13,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ACCOUNT_CONFLICT_EMAIL', index=13, number=14,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ACCOUNT_CONFLICT_PHONE', index=14, number=15,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_ENROLLMENT_PAYLOAD', index=15, number=99,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4087,
serialized_end=4510,
)
_sym_db.RegisterEnumDescriptor(_ENROLLMENTERROR)
EnrollmentError = enum_type_wrapper.EnumTypeWrapper(_ENROLLMENTERROR)
_SHAREERROR = _descriptor.EnumDescriptor(
name='ShareError',
full_name='bloombox.schema.services.shop.v1.ShareError',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NO_SHARE_ERROR', index=0, number=0,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4512,
serialized_end=4544,
)
_sym_db.RegisterEnumDescriptor(_SHAREERROR)
ShareError = enum_type_wrapper.EnumTypeWrapper(_SHAREERROR)
NOT_FOUND = 0
REC_EXPIRED = 1
ID_EXPIRED = 2
INVALID_PAYLOAD = 3
NOT_ELIGIBLE = 4
NO_ERROR = 0
INVALID_ORDER = 1
USER_NOT_ELIGIBLE = 2
ZIPCODE_NOT_ELIGIBLE = 3
ZIPCODE_NOT_ACTIVE = 4
ZIPCODE_MINIMUM_NOT_MET = 5
INVALID_PARTNER = 6
INVALID_LOCATION = 7
INVALID_CUSTOMER = 8
MISSING_DESTINATION = 9
SHOP_NOT_OPEN = 10
GLOBAL_MINIMUM_NOT_MET = 11
MEMBERSHIP_NOT_FOUND = 12
DUPLICATE_ORDER = 13
NO_ENROLLMENT_ERROR = 0
INVALID_EMAIL = 1
INVALID_NAME = 2
INVALID_PHONE = 3
INVALID_DATE_OF_BIRTH = 4
INVALID_REC_ID = 5
INVALID_REC_EXPIRATION = 6
INVALID_REC_DOCTOR_NAME = 7
INVALID_REC_DOCTOR_PHONE = 8
INVALID_USDL_ID = 9
INVALID_USDL_EXPIRATION = 11
INVALID_USDL_JURISDICTION = 12
ACCOUNT_CONFLICT = 13
ACCOUNT_CONFLICT_EMAIL = 14
ACCOUNT_CONFLICT_PHONE = 15
INVALID_ENROLLMENT_PAYLOAD = 99
NO_SHARE_ERROR = 0
_PING_REQUEST = _descriptor.Descriptor(
name='Request',
full_name='bloombox.schema.services.shop.v1.Ping.Request',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=407,
serialized_end=416,
)
_PING_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='bloombox.schema.services.shop.v1.Ping.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='bloombox.schema.services.shop.v1.Ping.Response.status', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=418,
serialized_end=485,
)
_PING_OPERATION = _descriptor.Descriptor(
name='Operation',
full_name='bloombox.schema.services.shop.v1.Ping.Operation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='request', full_name='bloombox.schema.services.shop.v1.Ping.Operation.request', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='response', full_name='bloombox.schema.services.shop.v1.Ping.Operation.response', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=488,
serialized_end=631,
)
_PING = _descriptor.Descriptor(
name='Ping',
full_name='bloombox.schema.services.shop.v1.Ping',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[_PING_REQUEST, _PING_RESPONSE, _PING_OPERATION, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=399,
serialized_end=631,
)
_SHOPINFO_REQUEST = _descriptor.Descriptor(
name='Request',
full_name='bloombox.schema.services.shop.v1.ShopInfo.Request',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='location', full_name='bloombox.schema.services.shop.v1.ShopInfo.Request.location', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=646,
serialized_end=711,
)
_SHOPINFO_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='bloombox.schema.services.shop.v1.ShopInfo.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='shop_status', full_name='bloombox.schema.services.shop.v1.ShopInfo.Response.shop_status', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=713,
serialized_end=790,
)
_SHOPINFO_OPERATION = _descriptor.Descriptor(
name='Operation',
full_name='bloombox.schema.services.shop.v1.ShopInfo.Operation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='request', full_name='bloombox.schema.services.shop.v1.ShopInfo.Operation.request', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='response', full_name='bloombox.schema.services.shop.v1.ShopInfo.Operation.response', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=793,
serialized_end=944,
)
_SHOPINFO = _descriptor.Descriptor(
name='ShopInfo',
full_name='bloombox.schema.services.shop.v1.ShopInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[_SHOPINFO_REQUEST, _SHOPINFO_RESPONSE, _SHOPINFO_OPERATION, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=634,
serialized_end=944,
)
_ENROLLMEMBER_REQUEST = _descriptor.Descriptor(
name='Request',
full_name='bloombox.schema.services.shop.v1.EnrollMember.Request',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='person', full_name='bloombox.schema.services.shop.v1.EnrollMember.Request.person', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='source', full_name='bloombox.schema.services.shop.v1.EnrollMember.Request.source', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='channel', full_name='bloombox.schema.services.shop.v1.EnrollMember.Request.channel', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='doctor_rec', full_name='bloombox.schema.services.shop.v1.EnrollMember.Request.doctor_rec', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='government_id', full_name='bloombox.schema.services.shop.v1.EnrollMember.Request.government_id', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='location', full_name='bloombox.schema.services.shop.v1.EnrollMember.Request.location', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='password', full_name='bloombox.schema.services.shop.v1.EnrollMember.Request.password', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dry_run', full_name='bloombox.schema.services.shop.v1.EnrollMember.Request.dry_run', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='consumer_profile', full_name='bloombox.schema.services.shop.v1.EnrollMember.Request.consumer_profile', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=964,
serialized_end=1373,
)
_ENROLLMEMBER_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='bloombox.schema.services.shop.v1.EnrollMember.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='bloombox.schema.services.shop.v1.EnrollMember.Response.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='foreign_id', full_name='bloombox.schema.services.shop.v1.EnrollMember.Response.foreign_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='bloombox.schema.services.shop.v1.EnrollMember.Response.error', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='result', full_name='bloombox.schema.services.shop.v1.EnrollMember.Response.result',
index=0, containing_type=None, fields=[]),
],
serialized_start=1375,
serialized_end=1497,
)
_ENROLLMEMBER_OPERATION = _descriptor.Descriptor(
name='Operation',
full_name='bloombox.schema.services.shop.v1.EnrollMember.Operation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='request', full_name='bloombox.schema.services.shop.v1.EnrollMember.Operation.request', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='response', full_name='bloombox.schema.services.shop.v1.EnrollMember.Operation.response', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1500,
serialized_end=1659,
)
_ENROLLMEMBER = _descriptor.Descriptor(
name='EnrollMember',
full_name='bloombox.schema.services.shop.v1.EnrollMember',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[_ENROLLMEMBER_REQUEST, _ENROLLMEMBER_RESPONSE, _ENROLLMEMBER_OPERATION, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=947,
serialized_end=1659,
)
_VERIFYMEMBER_REQUEST = _descriptor.Descriptor(
name='Request',
full_name='bloombox.schema.services.shop.v1.VerifyMember.Request',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='email_address', full_name='bloombox.schema.services.shop.v1.VerifyMember.Request.email_address', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='location', full_name='bloombox.schema.services.shop.v1.VerifyMember.Request.location', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1678,
serialized_end=1766,
)
_VERIFYMEMBER_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='bloombox.schema.services.shop.v1.VerifyMember.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='verified', full_name='bloombox.schema.services.shop.v1.VerifyMember.Response.verified', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='customer', full_name='bloombox.schema.services.shop.v1.VerifyMember.Response.customer', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='bloombox.schema.services.shop.v1.VerifyMember.Response.error', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='result', full_name='bloombox.schema.services.shop.v1.VerifyMember.Response.result',
index=0, containing_type=None, fields=[]),
],
serialized_start=1769,
serialized_end=1924,
)
_VERIFYMEMBER_OPERATION = _descriptor.Descriptor(
name='Operation',
full_name='bloombox.schema.services.shop.v1.VerifyMember.Operation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='request', full_name='bloombox.schema.services.shop.v1.VerifyMember.Operation.request', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='response', full_name='bloombox.schema.services.shop.v1.VerifyMember.Operation.response', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1927,
serialized_end=2086,
)
_VERIFYMEMBER = _descriptor.Descriptor(
name='VerifyMember',
full_name='bloombox.schema.services.shop.v1.VerifyMember',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[_VERIFYMEMBER_REQUEST, _VERIFYMEMBER_RESPONSE, _VERIFYMEMBER_OPERATION, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1662,
serialized_end=2086,
)
_CHECKZIPCODE_REQUEST = _descriptor.Descriptor(
name='Request',
full_name='bloombox.schema.services.shop.v1.CheckZipcode.Request',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='zipcode', full_name='bloombox.schema.services.shop.v1.CheckZipcode.Request.zipcode', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='location', full_name='bloombox.schema.services.shop.v1.CheckZipcode.Request.location', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2105,
serialized_end=2187,
)
_CHECKZIPCODE_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='bloombox.schema.services.shop.v1.CheckZipcode.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='supported', full_name='bloombox.schema.services.shop.v1.CheckZipcode.Response.supported', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='delivery_minimum', full_name='bloombox.schema.services.shop.v1.CheckZipcode.Response.delivery_minimum', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2189,
serialized_end=2244,
)
_CHECKZIPCODE_OPERATION = _descriptor.Descriptor(
name='Operation',
full_name='bloombox.schema.services.shop.v1.CheckZipcode.Operation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='request', full_name='bloombox.schema.services.shop.v1.CheckZipcode.Operation.request', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='response', full_name='bloombox.schema.services.shop.v1.CheckZipcode.Operation.response', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2247,
serialized_end=2406,
)
_CHECKZIPCODE = _descriptor.Descriptor(
name='CheckZipcode',
full_name='bloombox.schema.services.shop.v1.CheckZipcode',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[_CHECKZIPCODE_REQUEST, _CHECKZIPCODE_RESPONSE, _CHECKZIPCODE_OPERATION, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2089,
serialized_end=2406,
)
_SHAREORDER_REQUEST = _descriptor.Descriptor(
name='Request',
full_name='bloombox.schema.services.shop.v1.ShareOrder.Request',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='email_address', full_name='bloombox.schema.services.shop.v1.ShareOrder.Request.email_address', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='phone_number', full_name='bloombox.schema.services.shop.v1.ShareOrder.Request.phone_number', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order', full_name='bloombox.schema.services.shop.v1.ShareOrder.Request.order', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='location', full_name='bloombox.schema.services.shop.v1.ShareOrder.Request.location', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2424,
serialized_end=2650,
)
_SHAREORDER_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='bloombox.schema.services.shop.v1.ShareOrder.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='error', full_name='bloombox.schema.services.shop.v1.ShareOrder.Response.error', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2652,
serialized_end=2723,
)
_SHAREORDER_OPERATION = _descriptor.Descriptor(
name='Operation',
full_name='bloombox.schema.services.shop.v1.ShareOrder.Operation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='request', full_name='bloombox.schema.services.shop.v1.ShareOrder.Operation.request', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='response', full_name='bloombox.schema.services.shop.v1.ShareOrder.Operation.response', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2726,
serialized_end=2881,
)
_SHAREORDER = _descriptor.Descriptor(
name='ShareOrder',
full_name='bloombox.schema.services.shop.v1.ShareOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[_SHAREORDER_REQUEST, _SHAREORDER_RESPONSE, _SHAREORDER_OPERATION, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2409,
serialized_end=2881,
)
_SUBMITORDER_REQUEST = _descriptor.Descriptor(
name='Request',
full_name='bloombox.schema.services.shop.v1.SubmitOrder.Request',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order', full_name='bloombox.schema.services.shop.v1.SubmitOrder.Request.order', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='location', full_name='bloombox.schema.services.shop.v1.SubmitOrder.Request.location', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2899,
serialized_end=3009,
)
_SUBMITORDER_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='bloombox.schema.services.shop.v1.SubmitOrder.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='error', full_name='bloombox.schema.services.shop.v1.SubmitOrder.Response.error', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order_id', full_name='bloombox.schema.services.shop.v1.SubmitOrder.Response.order_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3011,
serialized_end=3100,
)
_SUBMITORDER_OPERATION = _descriptor.Descriptor(
name='Operation',
full_name='bloombox.schema.services.shop.v1.SubmitOrder.Operation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='request', full_name='bloombox.schema.services.shop.v1.SubmitOrder.Operation.request', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='response', full_name='bloombox.schema.services.shop.v1.SubmitOrder.Operation.response', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3103,
serialized_end=3260,
)
_SUBMITORDER = _descriptor.Descriptor(
name='SubmitOrder',
full_name='bloombox.schema.services.shop.v1.SubmitOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[_SUBMITORDER_REQUEST, _SUBMITORDER_RESPONSE, _SUBMITORDER_OPERATION, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2884,
serialized_end=3260,
)
_GETORDER_REQUEST = _descriptor.Descriptor(
name='Request',
full_name='bloombox.schema.services.shop.v1.GetOrder.Request',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='order_id', full_name='bloombox.schema.services.shop.v1.GetOrder.Request.order_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='location', full_name='bloombox.schema.services.shop.v1.GetOrder.Request.location', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3275,
serialized_end=3358,
)
_GETORDER_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='bloombox.schema.services.shop.v1.GetOrder.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='success', full_name='bloombox.schema.services.shop.v1.GetOrder.Response.success', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='order', full_name='bloombox.schema.services.shop.v1.GetOrder.Response.order', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='bloombox.schema.services.shop.v1.GetOrder.Response.error', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3361,
serialized_end=3494,
)
_GETORDER_OPERATION = _descriptor.Descriptor(
name='Operation',
full_name='bloombox.schema.services.shop.v1.GetOrder.Operation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='request', full_name='bloombox.schema.services.shop.v1.GetOrder.Operation.request', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='response', full_name='bloombox.schema.services.shop.v1.GetOrder.Operation.response', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3497,
serialized_end=3648,
)
_GETORDER = _descriptor.Descriptor(
name='GetOrder',
full_name='bloombox.schema.services.shop.v1.GetOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[_GETORDER_REQUEST, _GETORDER_RESPONSE, _GETORDER_OPERATION, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3263,
serialized_end=3648,
)
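# --- Generated wiring: link nested message/enum types to their containing
# --- messages and resolve cross-file field types. ---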
_PING_REQUEST.containing_type = _PING
_PING_RESPONSE.fields_by_name['status'].enum_type = services_dot_ServiceStatus__pb2._SERVICESTATUS
_PING_RESPONSE.containing_type = _PING
_PING_OPERATION.fields_by_name['request'].message_type = _PING_REQUEST
_PING_OPERATION.fields_by_name['response'].message_type = _PING_RESPONSE
_PING_OPERATION.containing_type = _PING
_SHOPINFO_REQUEST.fields_by_name['location'].message_type = partner_dot_LocationKey__pb2._LOCATIONKEY
_SHOPINFO_REQUEST.containing_type = _SHOPINFO
_SHOPINFO_RESPONSE.fields_by_name['shop_status'].enum_type = partner_dot_settings_dot_PartnerLocationSettings__pb2._SHOPSTATUS
_SHOPINFO_RESPONSE.containing_type = _SHOPINFO
_SHOPINFO_OPERATION.fields_by_name['request'].message_type = _SHOPINFO_REQUEST
_SHOPINFO_OPERATION.fields_by_name['response'].message_type = _SHOPINFO_RESPONSE
_SHOPINFO_OPERATION.containing_type = _SHOPINFO
_ENROLLMEMBER_REQUEST.fields_by_name['person'].message_type = person_dot_Person__pb2._PERSON
_ENROLLMEMBER_REQUEST.fields_by_name['source'].enum_type = identity_dot_User__pb2._ENROLLMENTSOURCE
_ENROLLMEMBER_REQUEST.fields_by_name['doctor_rec'].message_type = identity_dot_ids_dot_UserDoctorRec__pb2._USERDOCTORREC
_ENROLLMEMBER_REQUEST.fields_by_name['government_id'].message_type = identity_dot_ID__pb2._ID
_ENROLLMEMBER_REQUEST.fields_by_name['location'].message_type = partner_dot_LocationKey__pb2._LOCATIONKEY
_ENROLLMEMBER_REQUEST.fields_by_name['consumer_profile'].message_type = identity_dot_User__pb2._CONSUMERPROFILE
_ENROLLMEMBER_REQUEST.containing_type = _ENROLLMEMBER
_ENROLLMEMBER_RESPONSE.fields_by_name['error'].enum_type = _ENROLLMENTERROR
_ENROLLMEMBER_RESPONSE.containing_type = _ENROLLMEMBER
_ENROLLMEMBER_RESPONSE.oneofs_by_name['result'].fields.append(
_ENROLLMEMBER_RESPONSE.fields_by_name['foreign_id'])
_ENROLLMEMBER_RESPONSE.fields_by_name['foreign_id'].containing_oneof = _ENROLLMEMBER_RESPONSE.oneofs_by_name['result']
_ENROLLMEMBER_RESPONSE.oneofs_by_name['result'].fields.append(
_ENROLLMEMBER_RESPONSE.fields_by_name['error'])
_ENROLLMEMBER_RESPONSE.fields_by_name['error'].containing_oneof = _ENROLLMEMBER_RESPONSE.oneofs_by_name['result']
_ENROLLMEMBER_OPERATION.fields_by_name['request'].message_type = _ENROLLMEMBER_REQUEST
_ENROLLMEMBER_OPERATION.fields_by_name['response'].message_type = _ENROLLMEMBER_RESPONSE
_ENROLLMEMBER_OPERATION.containing_type = _ENROLLMEMBER
_VERIFYMEMBER_REQUEST.fields_by_name['location'].message_type = partner_dot_LocationKey__pb2._LOCATIONKEY
_VERIFYMEMBER_REQUEST.containing_type = _VERIFYMEMBER
_VERIFYMEMBER_RESPONSE.fields_by_name['customer'].message_type = commerce_dot_Customer__pb2._CUSTOMER
_VERIFYMEMBER_RESPONSE.fields_by_name['error'].enum_type = _VERIFYERROR
_VERIFYMEMBER_RESPONSE.containing_type = _VERIFYMEMBER
_VERIFYMEMBER_RESPONSE.oneofs_by_name['result'].fields.append(
_VERIFYMEMBER_RESPONSE.fields_by_name['customer'])
_VERIFYMEMBER_RESPONSE.fields_by_name['customer'].containing_oneof = _VERIFYMEMBER_RESPONSE.oneofs_by_name['result']
_VERIFYMEMBER_RESPONSE.oneofs_by_name['result'].fields.append(
_VERIFYMEMBER_RESPONSE.fields_by_name['error'])
_VERIFYMEMBER_RESPONSE.fields_by_name['error'].containing_oneof = _VERIFYMEMBER_RESPONSE.oneofs_by_name['result']
_VERIFYMEMBER_OPERATION.fields_by_name['request'].message_type = _VERIFYMEMBER_REQUEST
_VERIFYMEMBER_OPERATION.fields_by_name['response'].message_type = _VERIFYMEMBER_RESPONSE
_VERIFYMEMBER_OPERATION.containing_type = _VERIFYMEMBER
_CHECKZIPCODE_REQUEST.fields_by_name['location'].message_type = partner_dot_LocationKey__pb2._LOCATIONKEY
_CHECKZIPCODE_REQUEST.containing_type = _CHECKZIPCODE
_CHECKZIPCODE_RESPONSE.containing_type = _CHECKZIPCODE
_CHECKZIPCODE_OPERATION.fields_by_name['request'].message_type = _CHECKZIPCODE_REQUEST
_CHECKZIPCODE_OPERATION.fields_by_name['response'].message_type = _CHECKZIPCODE_RESPONSE
_CHECKZIPCODE_OPERATION.containing_type = _CHECKZIPCODE
_SHAREORDER_REQUEST.fields_by_name['email_address'].message_type = contact_dot_EmailAddress__pb2._EMAILADDRESS
_SHAREORDER_REQUEST.fields_by_name['phone_number'].message_type = contact_dot_PhoneNumber__pb2._PHONENUMBER
_SHAREORDER_REQUEST.fields_by_name['order'].message_type = commerce_dot_Order__pb2._ORDER
_SHAREORDER_REQUEST.fields_by_name['location'].message_type = partner_dot_LocationKey__pb2._LOCATIONKEY
_SHAREORDER_REQUEST.containing_type = _SHAREORDER
_SHAREORDER_RESPONSE.fields_by_name['error'].enum_type = _SHAREERROR
_SHAREORDER_RESPONSE.containing_type = _SHAREORDER
_SHAREORDER_OPERATION.fields_by_name['request'].message_type = _SHAREORDER_REQUEST
_SHAREORDER_OPERATION.fields_by_name['response'].message_type = _SHAREORDER_RESPONSE
_SHAREORDER_OPERATION.containing_type = _SHAREORDER
_SUBMITORDER_REQUEST.fields_by_name['order'].message_type = commerce_dot_Order__pb2._ORDER
_SUBMITORDER_REQUEST.fields_by_name['location'].message_type = partner_dot_LocationKey__pb2._LOCATIONKEY
_SUBMITORDER_REQUEST.containing_type = _SUBMITORDER
_SUBMITORDER_RESPONSE.fields_by_name['error'].enum_type = _ORDERERROR
_SUBMITORDER_RESPONSE.containing_type = _SUBMITORDER
_SUBMITORDER_OPERATION.fields_by_name['request'].message_type = _SUBMITORDER_REQUEST
_SUBMITORDER_OPERATION.fields_by_name['response'].message_type = _SUBMITORDER_RESPONSE
_SUBMITORDER_OPERATION.containing_type = _SUBMITORDER
_GETORDER_REQUEST.fields_by_name['location'].message_type = partner_dot_LocationKey__pb2._LOCATIONKEY
_GETORDER_REQUEST.containing_type = _GETORDER
_GETORDER_RESPONSE.fields_by_name['order'].message_type = commerce_dot_Order__pb2._ORDER
_GETORDER_RESPONSE.fields_by_name['error'].enum_type = _ORDERERROR
_GETORDER_RESPONSE.containing_type = _GETORDER
_GETORDER_OPERATION.fields_by_name['request'].message_type = _GETORDER_REQUEST
_GETORDER_OPERATION.fields_by_name['response'].message_type = _GETORDER_RESPONSE
_GETORDER_OPERATION.containing_type = _GETORDER
DESCRIPTOR.message_types_by_name['Ping'] = _PING
DESCRIPTOR.message_types_by_name['ShopInfo'] = _SHOPINFO
DESCRIPTOR.message_types_by_name['EnrollMember'] = _ENROLLMEMBER
DESCRIPTOR.message_types_by_name['VerifyMember'] = _VERIFYMEMBER
DESCRIPTOR.message_types_by_name['CheckZipcode'] = _CHECKZIPCODE
DESCRIPTOR.message_types_by_name['ShareOrder'] = _SHAREORDER
DESCRIPTOR.message_types_by_name['SubmitOrder'] = _SUBMITORDER
DESCRIPTOR.message_types_by_name['GetOrder'] = _GETORDER
DESCRIPTOR.enum_types_by_name['VerifyError'] = _VERIFYERROR
DESCRIPTOR.enum_types_by_name['OrderError'] = _ORDERERROR
DESCRIPTOR.enum_types_by_name['EnrollmentError'] = _ENROLLMENTERROR
DESCRIPTOR.enum_types_by_name['ShareError'] = _SHAREERROR
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Ping = _reflection.GeneratedProtocolMessageType('Ping', (_message.Message,), dict(
Request = _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), dict(
DESCRIPTOR = _PING_REQUEST,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.Ping.Request)
))
,
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
DESCRIPTOR = _PING_RESPONSE,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.Ping.Response)
))
,
Operation = _reflection.GeneratedProtocolMessageType('Operation', (_message.Message,), dict(
DESCRIPTOR = _PING_OPERATION,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.Ping.Operation)
))
,
DESCRIPTOR = _PING,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.Ping)
))
_sym_db.RegisterMessage(Ping)
_sym_db.RegisterMessage(Ping.Request)
_sym_db.RegisterMessage(Ping.Response)
_sym_db.RegisterMessage(Ping.Operation)
ShopInfo = _reflection.GeneratedProtocolMessageType('ShopInfo', (_message.Message,), dict(
Request = _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), dict(
DESCRIPTOR = _SHOPINFO_REQUEST,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.ShopInfo.Request)
))
,
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
DESCRIPTOR = _SHOPINFO_RESPONSE,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.ShopInfo.Response)
))
,
Operation = _reflection.GeneratedProtocolMessageType('Operation', (_message.Message,), dict(
DESCRIPTOR = _SHOPINFO_OPERATION,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.ShopInfo.Operation)
))
,
DESCRIPTOR = _SHOPINFO,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.ShopInfo)
))
_sym_db.RegisterMessage(ShopInfo)
_sym_db.RegisterMessage(ShopInfo.Request)
_sym_db.RegisterMessage(ShopInfo.Response)
_sym_db.RegisterMessage(ShopInfo.Operation)
EnrollMember = _reflection.GeneratedProtocolMessageType('EnrollMember', (_message.Message,), dict(
Request = _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), dict(
DESCRIPTOR = _ENROLLMEMBER_REQUEST,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.EnrollMember.Request)
))
,
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
DESCRIPTOR = _ENROLLMEMBER_RESPONSE,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.EnrollMember.Response)
))
,
Operation = _reflection.GeneratedProtocolMessageType('Operation', (_message.Message,), dict(
DESCRIPTOR = _ENROLLMEMBER_OPERATION,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.EnrollMember.Operation)
))
,
DESCRIPTOR = _ENROLLMEMBER,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.EnrollMember)
))
_sym_db.RegisterMessage(EnrollMember)
_sym_db.RegisterMessage(EnrollMember.Request)
_sym_db.RegisterMessage(EnrollMember.Response)
_sym_db.RegisterMessage(EnrollMember.Operation)
VerifyMember = _reflection.GeneratedProtocolMessageType('VerifyMember', (_message.Message,), dict(
Request = _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), dict(
DESCRIPTOR = _VERIFYMEMBER_REQUEST,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.VerifyMember.Request)
))
,
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
DESCRIPTOR = _VERIFYMEMBER_RESPONSE,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.VerifyMember.Response)
))
,
Operation = _reflection.GeneratedProtocolMessageType('Operation', (_message.Message,), dict(
DESCRIPTOR = _VERIFYMEMBER_OPERATION,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.VerifyMember.Operation)
))
,
DESCRIPTOR = _VERIFYMEMBER,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.VerifyMember)
))
_sym_db.RegisterMessage(VerifyMember)
_sym_db.RegisterMessage(VerifyMember.Request)
_sym_db.RegisterMessage(VerifyMember.Response)
_sym_db.RegisterMessage(VerifyMember.Operation)
CheckZipcode = _reflection.GeneratedProtocolMessageType('CheckZipcode', (_message.Message,), dict(
Request = _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), dict(
DESCRIPTOR = _CHECKZIPCODE_REQUEST,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.CheckZipcode.Request)
))
,
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
DESCRIPTOR = _CHECKZIPCODE_RESPONSE,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.CheckZipcode.Response)
))
,
Operation = _reflection.GeneratedProtocolMessageType('Operation', (_message.Message,), dict(
DESCRIPTOR = _CHECKZIPCODE_OPERATION,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.CheckZipcode.Operation)
))
,
DESCRIPTOR = _CHECKZIPCODE,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.CheckZipcode)
))
_sym_db.RegisterMessage(CheckZipcode)
_sym_db.RegisterMessage(CheckZipcode.Request)
_sym_db.RegisterMessage(CheckZipcode.Response)
_sym_db.RegisterMessage(CheckZipcode.Operation)
ShareOrder = _reflection.GeneratedProtocolMessageType('ShareOrder', (_message.Message,), dict(
Request = _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), dict(
DESCRIPTOR = _SHAREORDER_REQUEST,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.ShareOrder.Request)
))
,
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
DESCRIPTOR = _SHAREORDER_RESPONSE,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.ShareOrder.Response)
))
,
Operation = _reflection.GeneratedProtocolMessageType('Operation', (_message.Message,), dict(
DESCRIPTOR = _SHAREORDER_OPERATION,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.ShareOrder.Operation)
))
,
DESCRIPTOR = _SHAREORDER,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.ShareOrder)
))
_sym_db.RegisterMessage(ShareOrder)
_sym_db.RegisterMessage(ShareOrder.Request)
_sym_db.RegisterMessage(ShareOrder.Response)
_sym_db.RegisterMessage(ShareOrder.Operation)
SubmitOrder = _reflection.GeneratedProtocolMessageType('SubmitOrder', (_message.Message,), dict(
Request = _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), dict(
DESCRIPTOR = _SUBMITORDER_REQUEST,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.SubmitOrder.Request)
))
,
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
DESCRIPTOR = _SUBMITORDER_RESPONSE,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.SubmitOrder.Response)
))
,
Operation = _reflection.GeneratedProtocolMessageType('Operation', (_message.Message,), dict(
DESCRIPTOR = _SUBMITORDER_OPERATION,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.SubmitOrder.Operation)
))
,
DESCRIPTOR = _SUBMITORDER,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.SubmitOrder)
))
_sym_db.RegisterMessage(SubmitOrder)
_sym_db.RegisterMessage(SubmitOrder.Request)
_sym_db.RegisterMessage(SubmitOrder.Response)
_sym_db.RegisterMessage(SubmitOrder.Operation)
GetOrder = _reflection.GeneratedProtocolMessageType('GetOrder', (_message.Message,), dict(
Request = _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), dict(
DESCRIPTOR = _GETORDER_REQUEST,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.GetOrder.Request)
))
,
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
DESCRIPTOR = _GETORDER_RESPONSE,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.GetOrder.Response)
))
,
Operation = _reflection.GeneratedProtocolMessageType('Operation', (_message.Message,), dict(
DESCRIPTOR = _GETORDER_OPERATION,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.GetOrder.Operation)
))
,
DESCRIPTOR = _GETORDER,
__module__ = 'shop.v1.ShopService_v1_pb2'
# @@protoc_insertion_point(class_scope:bloombox.schema.services.shop.v1.GetOrder)
))
_sym_db.RegisterMessage(GetOrder)
_sym_db.RegisterMessage(GetOrder.Request)
_sym_db.RegisterMessage(GetOrder.Response)
_sym_db.RegisterMessage(GetOrder.Operation)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n#io.bloombox.schema.services.shop.v1H\001P\001\242\002\003BBS'))
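# --- Service descriptor for the Shop service. Each method's serialized
# --- options embed google.api.http REST bindings (paths such as
# --- /shop/v1/ping are visible in the option bytes below). ---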
_SHOP = _descriptor.ServiceDescriptor(
name='Shop',
full_name='bloombox.schema.services.shop.v1.Shop',
file=DESCRIPTOR,
index=0,
options=None,
serialized_start=4547,
serialized_end=6467,
methods=[
_descriptor.MethodDescriptor(
name='Ping',
full_name='bloombox.schema.services.shop.v1.Shop.Ping',
index=0,
containing_service=None,
input_type=_PING_REQUEST,
output_type=_PING_RESPONSE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\017\022\r/shop/v1/ping')),
),
_descriptor.MethodDescriptor(
name='ShopInfo',
full_name='bloombox.schema.services.shop.v1.Shop.ShopInfo',
index=1,
containing_service=None,
input_type=_SHOPINFO_REQUEST,
output_type=_SHOPINFO_RESPONSE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002O\022M/shop/v1/partners/{location.partner.code}/locations/{location.code}/shop/info')),
),
_descriptor.MethodDescriptor(
name='EnrollMember',
full_name='bloombox.schema.services.shop.v1.Shop.EnrollMember',
index=2,
containing_service=None,
input_type=_ENROLLMEMBER_REQUEST,
output_type=_ENROLLMEMBER_RESPONSE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002g\"K/shop/v1/partners/{location.partner.code}/locations/{location.code}/members:\001*Z\025\"\020/shop/v1/members:\001*')),
),
_descriptor.MethodDescriptor(
name='CheckZipcode',
full_name='bloombox.schema.services.shop.v1.Shop.CheckZipcode',
index=3,
containing_service=None,
input_type=_CHECKZIPCODE_REQUEST,
output_type=_CHECKZIPCODE_RESPONSE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002X\022V/shop/v1/partners/{location.partner.code}/locations/{location.code}/zipcheck/{zipcode}')),
),
_descriptor.MethodDescriptor(
name='VerifyMember',
full_name='bloombox.schema.services.shop.v1.Shop.VerifyMember',
index=4,
containing_service=None,
input_type=_VERIFYMEMBER_REQUEST,
output_type=_VERIFYMEMBER_RESPONSE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002d\022b/shop/v1/partners/{location.partner.code}/locations/{location.code}/members/{email_address}/verify')),
),
_descriptor.MethodDescriptor(
name='SubmitOrder',
full_name='bloombox.schema.services.shop.v1.Shop.SubmitOrder',
index=5,
containing_service=None,
input_type=_SUBMITORDER_REQUEST,
output_type=_SUBMITORDER_RESPONSE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\263\001\"J/shop/v1/partners/{location.partner.code}/locations/{location.code}/orders:\005orderZ^\"U/shop/v1/partners/{location.partner.code}/locations/{location.code}/orders/{order.id}:\005order')),
),
_descriptor.MethodDescriptor(
name='GetOrder',
full_name='bloombox.schema.services.shop.v1.Shop.GetOrder',
index=6,
containing_service=None,
input_type=_GETORDER_REQUEST,
output_type=_GETORDER_RESPONSE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002u\022U/shop/v1/partners/{location.partner.code}/locations/{location.code}/orders/{order_id}Z\034\022\032/shop/v1/orders/{order_id}')),
),
_descriptor.MethodDescriptor(
name='ShareOrder',
full_name='bloombox.schema.services.shop.v1.Shop.ShareOrder',
index=7,
containing_service=None,
input_type=_SHAREORDER_REQUEST,
output_type=_SHAREORDER_RESPONSE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\267\001\"P/shop/v1/partners/{location.partner.code}/locations/{location.code}/orders/share:\001*Z`\"[/shop/v1/partners/{location.partner.code}/locations/{location.code}/orders/{order.id}/share:\001*')),
),
])
_sym_db.RegisterServiceDescriptor(_SHOP)
DESCRIPTOR.services_by_name['Shop'] = _SHOP
# @@protoc_insertion_point(module_scope)
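# Example usage (a minimal sketch, not emitted by protoc; the import path is
# an assumption based on the __module__ value above):
#
#   from shop.v1 import ShopService_v1_pb2 as shop_pb2
#
#   ping = shop_pb2.Ping.Request()         # Ping.Request declares no fields
#   payload = ping.SerializeToString()     # standard protobuf wire encoding
#   shop_pb2.Ping.Request().ParseFromString(payload)
#
#   order = shop_pb2.SubmitOrder.Request()
#   order.location.partner.code = 'abc'    # field paths taken from the REST
#   order.location.code = 'main'           # templates in the method options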
| [
"[email protected]"
]
| |
2b0320378d647c689a4538c2bfa5efe8740ce529 | 73861a871c77c460ccc6fa7662ef63880e69dd4e | /vision/extract.py | 1677208920e14b1b958350c6936e92f7d28057f8 | [
"MIT"
]
| permissive | bbengfort/financial-analysis | 7d2822f44407dd0733e49b36f61886afabebe95e | aa5b2d80af0df04f6171ae18c381380964867b98 | refs/heads/master | 2021-01-10T14:06:44.012138 | 2016-01-20T02:25:52 | 2016-01-20T02:25:52 | 47,262,743 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,550 | py | # vision.extract
# Extracts financial information from Finances.xlsx and writes them to CSVs.
#
# Author: Benjamin Bengfort <[email protected]>
# Created: Wed Dec 02 20:41:22 2015 -0500
#
# Copyright (C) 2015 University of Maryland
# For license information, see LICENSE.txt
#
# ID: extract.py [] [email protected] $
"""
Extracts financial information from Finances.xlsx and writes them to CSVs.
"""
##########################################################################
## Imports
##########################################################################
import os
import csv
from datetime import datetime
from vision.reader import SpreadsheetReader
##########################################################################
## Module Constants
##########################################################################
PROJECT = os.path.dirname(os.path.dirname(__file__))
FINANCES = os.path.join(PROJECT, "fixtures", "Finances.xlsx")
ACCOUNTS = os.path.join(PROJECT, "fixtures", "accounts.csv")
TRANSACT = os.path.join(PROJECT, "fixtures", "transactions.csv")
MONTH = "%b%y"
ACT_FLDS = [
u'Month', u'Account Type', u'Bank', u'Account Name', u'Beginning Balance', u'Ending Balance',
]
TRN_FLDS = [
u'Month', u'Date', u'Amount', u'From Account', u'To Account'
]
##########################################################################
## Extraction
##########################################################################
def extract(finances=FINANCES, accounts=ACCOUNTS, transact=TRANSACT):
"""
Reads the sheets from finances and writes out the accounts and
transactions to the correct locations.
"""
reader = SpreadsheetReader(finances)
with open(accounts, 'w') as af:
with open(transact, 'w') as tf:
act_writer = csv.DictWriter(af, ACT_FLDS)
trn_writer = csv.DictWriter(tf, TRN_FLDS)
act_writer.writeheader()
trn_writer.writeheader()
for month in reader.sheets:
if month.lower() == 'blank': continue
try:
sheet = reader.finances(month)
for a in sheet.accounts:
a['Month'] = sheet.date
act_writer.writerow(a)
for t in sheet.transactions:
t['Month'] = sheet.date
trn_writer.writerow(t)
                except Exception as e:
                    print("{}: {}".format(month, e))
if __name__ == '__main__':
extract()
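# Note: the vision.reader import above means this script is assumed to be run
# from the project root, e.g. `python -m vision.extract`.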
| [
"[email protected]"
]
| |
d59af3974388b65b5470435d958d55db47734bc1 | 8b3ca44ee3d990233e74655b7131d616094f70c2 | /experiments/sparsity/sameK_drug_sensitivity_gdsc/gaussian_gaussian_ard.py | 9efc1162ab1aaaf6f9e7b0ba0daf1917eab61758 | []
| no_license | zshwuhan/BMF_Priors | 8b8c54271285a72d2085a56a9475c0756f375e67 | 6a600da1c41f1ccde2f2ba99298b40e68fb9910a | refs/heads/master | 2021-05-13T19:10:07.203215 | 2017-12-01T13:30:21 | 2017-12-01T13:30:21 | 116,883,181 | 1 | 0 | null | 2018-01-09T23:36:13 | 2018-01-09T23:36:13 | null | UTF-8 | Python | false | false | 1,370 | py | '''
Measure sparsity experiment on the GDSC drug sensitivity dataset, with
the All Gaussian model (multivariate posterior) wih ARD.
'''
project_location = "/Users/thomasbrouwer/Documents/Projects/libraries/"
import sys
sys.path.append(project_location)
from BMF_Priors.code.models.bmf_gaussian_gaussian_ard import BMF_Gaussian_Gaussian_ARD
from BMF_Priors.data.drug_sensitivity.load_data import load_gdsc_ic50_integer
from BMF_Priors.experiments.sparsity.sparsity_experiment import sparsity_experiment
import matplotlib.pyplot as plt
''' Run the experiment. '''
R, M = load_gdsc_ic50_integer()
model_class = BMF_Gaussian_Gaussian_ARD
n_repeats = 10
stratify_rows = False
fractions_unknown = [0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
settings = {
'R': R,
'M': M,
'K': 5,
'hyperparameters': { 'alpha':1., 'beta':1., 'alpha0':1., 'beta0':1. },
'init': 'random',
'iterations': 250,
'burn_in': 200,
'thinning': 1,
}
fout = './results/performances_gaussian_gaussian_ard.txt'
average_performances, all_performances = sparsity_experiment(
n_repeats=n_repeats, fractions_unknown=fractions_unknown, stratify_rows=stratify_rows,
model_class=model_class, settings=settings, fout=fout)
''' Plot the performance. '''
plt.figure()
plt.title("Sparsity performances")
plt.plot(fractions_unknown, average_performances['MSE'])
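# Note: the script sets a y-limit below but never calls plt.show() or
# plt.savefig(), so no figure is rendered when run non-interactively
# (assumed to be an omission rather than intent).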
plt.ylim(0, 1000)
| [
"[email protected]"
]
| |
832ddd7fd40b5386692cfa20df6d94a139502e50 | 32cb0be487895629ad1184ea25e0076a43abba0a | /LifePictorial/top/api/rest/TmallEaiOrderRefundGoodReturnCheckRequest.py | ce4941fbc54d218c06016543eb1831e1303432ae | []
| no_license | poorevil/LifePictorial | 6814e447ec93ee6c4d5b0f1737335601899a6a56 | b3cac4aa7bb5166608f4c56e5564b33249f5abef | refs/heads/master | 2021-01-25T08:48:21.918663 | 2014-03-19T08:55:47 | 2014-03-19T08:55:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 504 | py | '''
Created by auto_sdk on 2014-02-10 16:59:30
'''
from top.api.base import RestApi
class TmallEaiOrderRefundGoodReturnCheckRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.company_code = None
self.confirm_result = None
self.confirm_time = None
self.operator = None
self.refund_id = None
self.refund_phase = None
self.sid = None
def getapiname(self):
return 'tmall.eai.order.refund.good.return.check'
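# Example usage (a hypothetical sketch; credential/session handling belongs to
# the surrounding `top` SDK, and the getResponse() call is an assumption based
# on its usual RestApi pattern):
#
#   req = TmallEaiOrderRefundGoodReturnCheckRequest()
#   req.refund_id = 123456
#   req.refund_phase = 'onsale'
#   req.confirm_result = 'true'
#   resp = req.getResponse(authrize=session_key)  # session_key is hypothetical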
| [
"[email protected]"
]
| |
5f21d9bb1a6c40517ba06e2c323a7b65004c4df6 | 53dd5d2cfb79edc87f6c606bbfb7d0bedcf6da61 | /.history/EMR/age_sex_20190618145358.py | 3e1c90629ae0e75d79668dc2289b3d2582a601eb | []
| no_license | cyc19950621/python | 4add54894dc81187211aa8d45e5115903b69a182 | d184b83e73334a37d413306d3694e14a19580cb0 | refs/heads/master | 2020-04-11T20:39:34.641303 | 2019-07-02T12:54:49 | 2019-07-02T12:54:49 | 162,078,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,267 | py | # -*- coding:utf-8 -*-
import time
import math
import os
import sys
import os.path, shutil
import codecs
import EMRdef
import re
emrtxts = EMRdef.txttq(u'D:\DeepLearning ER\EHR-all')  # gather the .txt files under this directory
for emrtxt in emrtxts:
    f = open(emrtxt, 'r', errors="ignore")  # errors="ignore" tolerates messy Chinese encodings
    emrtxt = os.path.basename(emrtxt)
    emrtxt_str = re.findall(r'(^.+?)\_', emrtxt)  # take the record ID before the first underscore
    emrtxt = "".join(emrtxt_str)  # join the findall result back into a str
out = []
for line in f.readlines():
line = re.sub(' ','' ,line)
line = re.sub('\n','' ,line)
line = ''.join(line)
if line=='男' or line=='男性':
out.append('M')
elif line =='女' or line=='女性':
out.append('W')
if line.find('岁')>-1:
line = re.sub('岁','',line)
line = ''.join(line)
out.append(line)
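        # The triple-quoted block below is a disabled draft that bins the
        # parsed age into Child/Youth/Mid/Old groups; kept verbatim.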
'''
se = int(line)
if se <=20:
a = 'Child'
elif se <=40:
a = 'Younth'
elif se <= 60:
a = 'Mid'
else:
a= 'old'
out.append(a)'''
output = ' '.join(out)
EMRdef.text_create(r'D:\DeepLearning ER\EHRage','.txt' ,emrtxt,output)
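# Assumed behavior of the EMRdef helpers, inferred from their use above:
# txttq(path) lists the .txt files under `path`, and
# text_create(out_dir, ext, name, text) writes `text` to out_dir/name + ext.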
| [
"[email protected]"
]
| |
a1d2b1c6ef8f9835c41df4bfaac431dfad3a6bdd | e166decefaf4f99510c2389fc5ee01a4a9252054 | /test_mbcf.py | 1efd1cdabf351825550a6df06560c0535c06b487 | []
| no_license | fuhailin/Memory-based-collaborative-filtering | 61ccecab76c259450f7105f34f94dde7340da097 | 65dac5ab14215ec3a7b0d0f89e6d4831cc4d942e | refs/heads/master | 2022-09-21T18:22:57.186339 | 2022-09-01T02:57:46 | 2022-09-01T02:57:46 | 85,299,014 | 45 | 25 | null | 2022-09-01T02:57:47 | 2017-03-17T10:20:42 | Python | UTF-8 | Python | false | false | 121 | py | #coding=utf-8
class TestClass:
    def test_one(self):
        x = "this"
        assert x == "this"  # assumed minimal check; the original asserted nothing

    def test_two(self):
        x = "hello"
        assert x == "hello"  # assumed minimal check
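
# Run with: pytest test_mbcf.py -q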
| [
"[email protected]"
]
| |
72c3ee6ceac0833b76267a754488a2c3ded1d52d | 21034e55eab7fb8f6fb177c170f481bc434166bd | /lagou_requests/lg_job.py | 6b618a992a13dd98f3dde1c3e52497122935d6db | []
| no_license | mengguiyouziyi/lagou | e1475c5184781a47bcd233996c6efb6109ccb8bb | 839f93a31da96355e291ee21a038d0d8da8b25c8 | refs/heads/master | 2021-01-17T18:04:12.988615 | 2017-07-08T10:45:19 | 2017-07-08T10:45:19 | 94,430,859 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,545 | py | # coding=utf-8
from requests.sessions import session
import requests
import pymysql
import json
from scrapy.selector import Selector
import random, time, datetime
from multiprocessing import Pool
try:
import cookielib
except:
import http.cookiejar as cookielib
cookies = [
'user_trace_token=20170612170648-026cfbd0f39d4330b02cf404bac6d999; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1498480701,1498706048,1499060843,1499153853; LGUID=20170612170650-7898a127-4f4e-11e7-9ab4-5254005c3644; _ga=GA1.2.109776163.1497258409; index_location_city=%E5%8C%97%E4%BA%AC; JSESSIONID=ABAAABAABEEAAJA7A5B9089163E244FDE311078D9478A71; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1499252175; LGRID=20170705185614-9052fc20-6170-11e7-a312-5254005c3644; _gid=GA1.2.1298607794.1499153855; TG-TRACK-CODE=hpage_code; SEARCH_ID=39aabb581df6400b9c84960635747ac4; fromsite="localhost:63342"; utm_source=""; X_HTTP_TOKEN=e4dcfb507ab9891e47a5a859901b9584; LGSID=20170705182748-977f4ef8-616c-11e7-a311-5254005c3644; PRE_UTM=; PRE_HOST=; PRE_SITE=; PRE_LAND=http%3A%2F%2Flocalhost%2Flagou%2Flg_scrp_170704%2Flagou%2Ftest.html%3F_ijt%3D4kj0j6gn5k4n2744rdvhhpgjd4; _gat=1; _putrc=4F9D5E0356A7F682; login=true; unick=%E5%AD%99%E7%AB%8B%E5%BB%BA; showExpriedIndex=1; showExpriedCompanyHome=1; showExpriedMyPublish=1; hasDeliver=1',
# 'user_trace_token=20170611162631-ac0e66cb-4e7f-11e7-83ee-525400f775ce; LGUID=20170611162631-ac0e6aae-4e7f-11e7-83ee-525400f775ce; fromsite="localhost:63342"; index_location_city=%E5%8C%97%E4%BA%AC; _ga=GA1.2.200265071.1497169588; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497257843,1497270178,1497439331,1497675144',
# 'user_trace_token=20170612170648-026cfbd0f39d4330b02cf404bac6d999; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497258409,1497577951; LGUID=20170612170650-7898a127-4f4e-11e7-9ab4-5254005c3644; _ga=GA1.2.109776163.1497258409; index_location_city=%E5%85%A8%E5%9B%BD; _gid=GA1.2.722219593.1497427964; SEARCH_ID=8ed5770ccb9d4b99b174c1d9e1b93fdd; JSESSIONID=ABAAABAABEEAAJA1B1DFAE62C3164498D1AAA9F6FDFA840; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497594024; LGRID=20170616142030-e5896203-525b-11e7-9c6d-5254005c3644; TG-TRACK-CODE=index_search; LGSID=20170616135506-5900b1fa-5258-11e7-9bc5-525400f775ce',
# 'user_trace_token=20170611162631-ac0e66cb-4e7f-11e7-83ee-525400f775ce; LGUID=20170611162631-ac0e6aae-4e7f-11e7-83ee-525400f775ce; fromsite="localhost:63342"; JSESSIONID=ABAAABAABEEAAJABF9E692B8A110EAC1E5B8D41DCB395EF; _gat=1; PRE_UTM=; PRE_HOST=; PRE_SITE=; PRE_LAND=https%3A%2F%2Fwww.lagou.com%2F; _putrc=4F9D5E0356A7F682; login=true; unick=%E5%AD%99%E7%AB%8B%E5%BB%BA; showExpriedIndex=1; showExpriedCompanyHome=1; showExpriedMyPublish=1; hasDeliver=1; TG-TRACK-CODE=index_search; _gid=GA1.2.805528246.1497530341; _ga=GA1.2.200265071.1497169588; LGSID=20170617125231-c53b4db5-5318-11e7-9c6f-5254005c3644; LGRID=20170617125253-d279455a-5318-11e7-9cb7-525400f775ce; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497257843,1497270178,1497439331,1497675144; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497675166; SEARCH_ID=c420f95450ed43b1aac4e94524e157aa; index_location_city=%E5%8C%97%E4%BA%AC',
# 'user_trace_token=20170611162631-ac0e66cb-4e7f-11e7-83ee-525400f775ce; LGUID=20170611162631-ac0e6aae-4e7f-11e7-83ee-525400f775ce; fromsite="localhost:63342"; JSESSIONID=ABAAABAABEEAAJABF9E692B8A110EAC1E5B8D41DCB395EF; _putrc=4F9D5E0356A7F682; login=true; unick=%E5%AD%99%E7%AB%8B%E5%BB%BA; showExpriedIndex=1; showExpriedCompanyHome=1; showExpriedMyPublish=1; hasDeliver=1; TG-TRACK-CODE=index_search; _gid=GA1.2.805528246.1497530341; _ga=GA1.2.200265071.1497169588; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497257843,1497270178,1497439331,1497675144; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497675187; LGRID=20170617125314-df4f1bb2-5318-11e7-9cb8-525400f775ce; SEARCH_ID=5220eaf2c7984c2a82cb444855c488f6; index_location_city=%E5%8C%97%E4%BA%AC',
# 'user_trace_token=20170612170648-026cfbd0f39d4330b02cf404bac6d999; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497258409,1497577951; LGUID=20170612170650-7898a127-4f4e-11e7-9ab4-5254005c3644; _ga=GA1.2.109776163.1497258409; index_location_city=%E5%85%A8%E5%9B%BD; _gid=GA1.2.722219593.1497427964; SEARCH_ID=3a924c638aad4c7fb4b7ae36b26d440e; JSESSIONID=ABAAABAABEEAAJA1B1DFAE62C3164498D1AAA9F6FDFA840; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497678584; LGRID=20170617134952-c834c774-5320-11e7-9c70-5254005c3644; TG-TRACK-CODE=index_search; _gat=1; LGSID=20170617134948-c5c76755-5320-11e7-9cd0-525400f775ce; PRE_UTM=; PRE_HOST=; PRE_SITE=; PRE_LAND=https%3A%2F%2Fpassport.lagou.com%2Flogin%2Flogin.html%3Fmsg%3Dvalidation%26uStatus%3D2%26clientIp%3D36.110.41.42'
# 'user_trace_token=20170611162631-ac0e66cb-4e7f-11e7-83ee-525400f775ce; LGUID=20170611162631-ac0e6aae-4e7f-11e7-83ee-525400f775ce; fromsite="localhost:63342"; JSESSIONID=ABAAABAABEEAAJABF9E692B8A110EAC1E5B8D41DCB395EF; _putrc=4F9D5E0356A7F682; login=true; unick=%E5%AD%99%E7%AB%8B%E5%BB%BA; showExpriedIndex=1; showExpriedCompanyHome=1; showExpriedMyPublish=1; hasDeliver=1; TG-TRACK-CODE=index_search; _ga=GA1.2.200265071.1497169588; _gid=GA1.2.805528246.1497530341; LGSID=20170617134823-935e1867-5320-11e7-9c70-5254005c3644; PRE_UTM=; PRE_HOST=; PRE_SITE=https%3A%2F%2Fwww.lagou.com%2Fjobs%2Flist_%2520%25E5%258C%2597%25E4%25BA%25AC%25E9%2598%25BF%25E5%25B0%2594%25E6%25B3%2595%25E6%258A%2595%25E8%25B5%2584%25E9%259B%2586%25E5%259B%25A2%25E6%259C%2589%25E9%2599%2590%25E5%2585%25AC%25E5%258F%25B8%3Fcity%3D%25E5%258C%2597%25E4%25BA%25AC%26cl%3Dfalse%26fromSearch%3Dtrue%26labelWords%3D%26suginput%3D; PRE_LAND=https%3A%2F%2Fwww.lagou.com%2Fjobs%2Flist_%2520%25E5%258C%2597%25E4%25BA%25AC%25E4%25B8%25AD%25E7%2594%25B5%25E5%258D%2593%25E8%2583%25BD%25E6%2595%2599%25E8%2582%25B2%25E7%25A7%2591%25E6%258A%2580%25E6%259C%2589%25E9%2599%2590%25E5%2585%25AC%25E5%258F%25B8%3Fcity%3D%25E5%258C%2597%25E4%25BA%25AC%26cl%3Dfalse%26fromSearch%3Dtrue%26labelWords%3D%26suginput%3D; LGRID=20170617134823-935e1a54-5320-11e7-9c70-5254005c3644; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497257843,1497270178,1497439331,1497675144; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497678496; SEARCH_ID=91273cccf7db411ba100db9e6a936bc5; index_location_city=%E5%8C%97%E4%BA%AC',
# 'user_trace_token=20170611162631-ac0e66cb-4e7f-11e7-83ee-525400f775ce; LGUID=20170611162631-ac0e6aae-4e7f-11e7-83ee-525400f775ce; fromsite="localhost:63342"; JSESSIONID=ABAAABAABEEAAJABF9E692B8A110EAC1E5B8D41DCB395EF; _putrc=4F9D5E0356A7F682; login=true; unick=%E5%AD%99%E7%AB%8B%E5%BB%BA; showExpriedIndex=1; showExpriedCompanyHome=1; showExpriedMyPublish=1; hasDeliver=1; TG-TRACK-CODE=index_search; PRE_UTM=; PRE_HOST=; PRE_SITE=https%3A%2F%2Fwww.lagou.com%2Fjobs%2Flist_%2520%25E5%258C%2597%25E4%25BA%25AC%25E9%2598%25BF%25E5%25B0%2594%25E6%25B3%2595%25E6%258A%2595%25E8%25B5%2584%25E9%259B%2586%25E5%259B%25A2%25E6%259C%2589%25E9%2599%2590%25E5%2585%25AC%25E5%258F%25B8%3Fcity%3D%25E5%258C%2597%25E4%25BA%25AC%26cl%3Dfalse%26fromSearch%3Dtrue%26labelWords%3D%26suginput%3D; PRE_LAND=https%3A%2F%2Fwww.lagou.com%2Fjobs%2Flist_%2520%25E5%258C%2597%25E4%25BA%25AC%25E4%25B8%25AD%25E7%2594%25B5%25E5%258D%2593%25E8%2583%25BD%25E6%2595%2599%25E8%2582%25B2%25E7%25A7%2591%25E6%258A%2580%25E6%259C%2589%25E9%2599%2590%25E5%2585%25AC%25E5%258F%25B8%3Fcity%3D%25E5%258C%2597%25E4%25BA%25AC%26cl%3Dfalse%26fromSearch%3Dtrue%26labelWords%3D%26suginput%3D; _gid=GA1.2.805528246.1497530341; _ga=GA1.2.200265071.1497169588; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497257843,1497270178,1497439331,1497675144; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497679473; LGSID=20170617134823-935e1867-5320-11e7-9c70-5254005c3644; LGRID=20170617140441-da0efd17-5322-11e7-9c70-5254005c3644; SEARCH_ID=24cd7522dd974eb997ba288cbb648b7f; index_location_city=%E5%8C%97%E4%BA%AC',
# 'user_trace_token=20170612170648-026cfbd0f39d4330b02cf404bac6d999; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497258409,1497577951; LGUID=20170612170650-7898a127-4f4e-11e7-9ab4-5254005c3644; _ga=GA1.2.109776163.1497258409; index_location_city=%E5%85%A8%E5%9B%BD; _gid=GA1.2.722219593.1497427964; SEARCH_ID=ef064afaad504da286fff87b92b32359; JSESSIONID=ABAAABAABEEAAJA1B1DFAE62C3164498D1AAA9F6FDFA840; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1497678608; LGRID=20170617135014-d5c1980f-5320-11e7-9cd0-525400f775ce; TG-TRACK-CODE=index_search; LGSID=20170617134948-c5c76755-5320-11e7-9cd0-525400f775ce; PRE_UTM=; PRE_HOST=; PRE_SITE=; PRE_LAND=https%3A%2F%2Fpassport.lagou.com%2Flogin%2Flogin.html%3Fmsg%3Dvalidation%26uStatus%3D2%26clientIp%3D36.110.41.42',
]
headers = {
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"HOST": "www.lagou.com",
"Referer": 'https://www.lagou.com/gongsi/j188673.html',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:54.0) Gecko/20100101 Firefox/54.0',
'X-Requested-With': "XMLHttpRequest",
'Accept-Encoding': "gzip, deflate, br",
# 'Cookie': random.choice(cookies)
}
# proxyMeta = "http://H4XGPM790E93518D:[email protected]:9020"
# self.proxies = {
# "http": proxyMeta,
# "https": proxyMeta,
# }
session = session()
# session.cookies = cookielib.LWPCookieJar(filename="cookies.txt")
# try:
# session.cookies.load(ignore_discard=True)
# except:
# print ("cookie未能加载")
form_data = {
'companyId': 188673,
'pageNo': 1,
'pageSize': 10,
'positionFirstType': '全部',
}
url = 'https://www.lagou.com/gongsi/searchPosition.json'
res = session.post(url, headers=headers, data=form_data)
with open('text.json', 'w') as f:
f.write(res.text)
# session.cookies.save()
print(res.cookies) | [
"[email protected]"
]
| |
d35cb142ca658d669d5e931ec54d9e3e60ae2833 | f73fa6ce1b0df4ab5c4b3a37c27e61cf3ab1515c | /authapp/migrations/0002_auto_20210113_1639.py | b07e3d5162238755bc292026d331f526744eedea | []
| no_license | mr-Robot-777/geekshop | 15444f13fc2f97eba88eb0538407cb84635e3d66 | 8f7b2fc17ca7731813e444a3005073aa9ded2799 | refs/heads/master | 2023-02-28T10:01:23.801994 | 2021-02-05T16:14:16 | 2021-02-05T16:14:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 877 | py | # Generated by Django 2.2.17 on 2021-01-13 11:39
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('authapp', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='shopuser',
name='activation_key',
field=models.CharField(blank=True, max_length=128),
),
migrations.AddField(
model_name='shopuser',
name='activation_key_exires',
field=models.DateTimeField(default=datetime.datetime(2021, 1, 15, 11, 39, 56, 491627, tzinfo=utc)),
),
migrations.AlterField(
model_name='shopuser',
name='last_name',
field=models.CharField(blank=True, max_length=150, verbose_name='last name'),
),
]
| [
"[email protected]"
]
| |
69e03027619f9036ca92caea3d6929e383bb11cb | 3dcafd835cc14329d3d95fce96e4553009df1c59 | /mystic/svctools.py | d547f96e03ed22447edd81b49279762c543c3bda | [
"BSD-3-Clause"
]
| permissive | silky/mystic | e4856721a6fdb7eaae5e4351c02d486c930352a6 | 369bebe23e3460b37cba4a64d00da7461b6fb028 | refs/heads/master | 2020-12-03T03:50:33.154428 | 2013-10-25T03:22:18 | 2013-10-25T03:22:18 | 15,031,357 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,153 | py | #!/usr/bin/env python
#
# Patrick Hung.
"""
Simple utility functions for SV-classifications
"""
from numpy import zeros, multiply, ndarray, vectorize, array, dot, transpose, diag, sum
def KernelMatrix(X, k=dot):
n = X.shape[0]
Q = zeros((n,n))
for i in range(n):
for j in range(i, n):
Q[i,j] = k(X[i,:],X[j,:])
return Q + transpose(Q) - diag(Q.diagonal())
def WeightVector(alpha, X, y):
ay = (alpha * y).flatten()
aXy = transpose(ay * transpose(X))
return sum(aXy, 0)
def SupportVectors(alpha, y=None, eps = 0):
    from mystic import svmtools
    sv = svmtools.SupportVectors(alpha, eps)
    if y is None:
return sv
else:
class1 = set((y>0).nonzero()[1])
class2 = set((y<0).nonzero()[1])
sv1 = class1.intersection(sv)
sv2 = class2.intersection(sv)
return list(sv1), list(sv2)
def Bias(alpha, X, y, kernel=dot):
"""Compute classification bias. """
sv1, sv2 = SupportVectors(alpha, y,eps=1e-6)
pt1, pt2 = X[sv1[0],:], X[sv2[0],:]
k1, k2 = kernel(X, pt1), kernel(X,pt2)
return -0.5 * (sum(alpha*y*k1) + sum(alpha*y*k2))
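# Illustrative usage only (not part of the original module): exercise
# KernelMatrix/WeightVector on a tiny invented dataset.
if __name__ == '__main__':
    X = array([[0., 0.], [1., 1.], [0., 1.], [1., 0.]])
    y = array([[1., 1., -1., -1.]])
    alpha = array([[0.5, 0.5, 0.5, 0.5]])   # made-up multipliers, not a fitted SVM
    Q = KernelMatrix(X)                      # symmetric Gram matrix of dot products
    w = WeightVector(alpha, X, y)
    print(Q)
    print(w)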
# end of file
| [
"mmckerns@968178ea-60bd-409e-af13-df8a517b6005"
]
| mmckerns@968178ea-60bd-409e-af13-df8a517b6005 |
006ba610db2b9012c049987a6af4aaf9bdbd2252 | 4f1dd1353b83d30e97abbff2c6b531cd538160e3 | /RoboBase.py | 8912a17c7d4874fd362a70e152be6c8b6c926233 | [
"MIT"
]
| permissive | titos-carrasco/RoboBase | c089f84f0eb622877f0ed8620aadd18af5109cdd | ac3fe692482a6bd73ad836d56336c76508731f2d | refs/heads/master | 2021-01-13T01:40:40.773039 | 2014-04-18T22:59:34 | 2014-04-18T22:59:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,744 | py | # -*- coding: utf-8 -*-
"""Bluetooth control for a generic Arduino based robot
"""
import serial
import threading
import time
class RoboBase:
"""Class to control the robot
Usage:
    robot = RoboBase("/dev/rfcomm0")
if(robot.IsConnected()):
robot.SetMotors(-255, 255)
time.sleep(1)
robot.SetMotors(0, 0)
robot.Close()
"""
    PACKET_LENGTH = 8
def __init__(self, port, bauds=9600):
"""Create the robot object and open a connection to it.
Args:
port: The serial port to use (string)
bauds: The speed for the serial communication (integer)
Raises:
KeyboardInterrupt
"""
self._lock = threading.Lock()
self._ser = None
for t in range(4):
try:
self._ser = serial.Serial(port, baudrate=bauds, bytesize=8,
parity='N', stopbits=1, timeout=1)
self._Debug('RoboBase.Init: Connected to %s, %d bps' %
(port, bauds))
self._Debug('RoboBase.Init: Ignoring old data')
self._ConsumeOldData()
break
except serial.SerialException:
self._Debug('RoboBase.Init: SerialException')
except ValueError:
self._Debug('RoboBase.Init: ValueError')
except IOError:
self._Debug('RoboBase.Init: IOError')
except KeyboardInterrupt:
self._Debug('RoboBase.Init: KeyboardInterrupt')
raise
def _Lock(self):
"""Get an exclusive access to the robot."""
self._lock.acquire()
if(self._ser!=None and self._ser.isOpen()):
return True
else:
self._lock.release()
return False
def _Unlock(self):
"""Release the exclusive access to the robot."""
try:
self._lock.release()
except:
pass
def _Debug(self, val):
"""Simple console debug."""
print val
def _ConsumeOldData(self):
"""Consume data from latest requests"""
timeout = self._ser.timeout
self._ser.timeout = 1
        # a single read() flushes any stale bytes; read errors are deliberately
        # discarded, as the original 'break' inside 'finally' also did
        try:
            self._ser.read(1000)
        except serial.SerialException:
            pass
self._ser.timeout = timeout
def IsConnected(self):
"""True if connected to the robot."""
try:
if(self._ser.isOpen()):
return True
except:
pass
return False
def Close(self):
"""Close the connection to the robot."""
if(self._Lock()):
self._ser.close()
self._ser = None
self._Unlock()
# Commands for the robot
CMD_SET_MOTORS = 0x01
CMD_PING = 0x02
CMD_BEEP = 0x03
CMD_INFO = 0x04
def _SendCommand(self, packet):
"""Send a command to the robot.
Args:
packet: PACKET_LENGTH byte packets.
The first byte is the command (CMD_XX)
"""
self._ser.write(packet)
self._ser.flush()
r = self._ser.read(self.PACKET_LENGTH) # robot must return the packet
r = bytearray(r)
        if packet != r:
self._Debug('Packet Mismatch')
self._Debug(list(packet))
self._Debug(list(r))
def SetMotors(self, motor1, motor2):
"""Applies power to the motors
Args:
motor1, motor2 : power for the motor (-255 - 255)
0 = stop, <0 backward, >0 forward
"""
if(self._Lock()):
try:
motor1, motor2 = int(motor1), int(motor2)
if(motor1<0):
m1_dir = 0x00;
else:
m1_dir = 0x01
if(motor2<0):
m2_dir = 0x00
else:
m2_dir = 0x01
packet = bytearray(self.PACKET_LENGTH)
                packet[0] = self.CMD_SET_MOTORS
packet[1] = m1_dir
packet[2] = abs(motor1) & 0xFF
packet[3] = m2_dir
packet[4] = abs(motor2) & 0XFF
self._SendCommand(packet)
except serial.SerialTimeoutException:
self._Debug('RoboBase.SetMotors: SerialTimeoutException')
except serial.SerialException:
self._Debug('RoboBase.SetMotors: SerialException')
except:
self._Debug('RoboBase.SetMotors: Unexpected Exception')
self._Unlock()
def Ping(self, max_distance):
"""Gets the distance reported by the ping sensor
Args:
max_distance: max distance for detection (integer)
Returns:
the distance to an obstacle
"""
r = 0
if(self._Lock()):
try:
max_distance = abs(int(max_distance)) & 0xFFFF
packet = bytearray(self.PACKET_LENGTH)
                packet[0] = self.CMD_PING
packet[1] = (max_distance >> 8)
packet[2] = (max_distance & 0xFF)
self._SendCommand(packet)
r = self._Read2UBytes()/100.0
except serial.SerialTimeoutException:
self._Debug('RoboBase.Ping: SerialTimeoutException')
except serial.SerialException:
self._Debug('RoboBase.Ping: SerialException')
except:
self._Debug('RoboBase.Ping: Unexpected Exception')
self._Unlock()
return r
def Beep(self, freq, duration):
"""Make a sound
Args:
freq: frequency (integer)
duration: duration of the beep (integer) in milliseconds
"""
if(self._Lock()):
try:
freq = abs(int(freq)) & 0xFFFF
duration = abs(int(duration)) & 0XFFFF
packet = bytearray(self.PACKET_LENGTH)
                packet[0] = self.CMD_BEEP
packet[1] = (freq >> 8)
packet[2] = (freq & 0xFF)
packet[3] = (duration >> 8)
packet[4] = (duration & 0xFF)
self._SendCommand(packet)
time.sleep(duration/1000.0)
except serial.SerialTimeoutException:
self._Debug('RoboBase.Beep: SerialTimeoutException')
except serial.SerialException:
self._Debug('RoboBase.Beep: SerialException')
except:
self._Debug('RoboBase.Beep: Unexpected Exception')
self._Unlock()
def GetInfo(self):
"""Get robot information
Returns:
Information about the robot
"""
r = ''
if(self._Lock()):
try:
packet = bytearray(self.PACKET_LENGTH)
                packet[0] = self.CMD_INFO
self._SendCommand(packet)
r = self._ReadLine()
except serial.SerialTimeoutException:
self._Debug('RoboBase.GetInfo: SerialTimeoutException')
except serial.SerialException:
self._Debug('RoboBase.GetInfo: SerialException')
except:
self._Debug('RoboBase.GetInfo: Unexpected Exception')
self._Unlock()
return r
###################################################################
def _ReadLine(self):
return self._ser.readline()
def _ReadBytes(self, n):
return self._ser.read(n)
def _Read1UByte(self):
return ord(self._ser.read(1))
def _Read2UBytes(self):
return (ord(self._ser.read(1)) << 8) + ord(self._ser.read(1))
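# Minimal usage sketch (illustrative only; '/dev/rfcomm0' is an assumed
# RFCOMM binding -- substitute the port your robot is actually paired on).
if __name__ == '__main__':
    robot = RoboBase('/dev/rfcomm0')
    if robot.IsConnected():
        print robot.GetInfo()
        robot.Beep(440, 200)       # 440 Hz beep for 200 ms
        robot.SetMotors(128, 128)  # half power forward on both motors
        time.sleep(1)
        robot.SetMotors(0, 0)      # stop
        robot.Close()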
| [
"[email protected]"
]
| |
87302d938e2f55c44d36d63480ba7cc1d616a017 | dbd87fe6e9466c4cada18b037667cfdddc62c193 | /data/AV Connectior/alpha_vantage/cryptocurrencies.py | 4af80e1a7b1669d87d1c5c63d89dc537cb296929 | []
| no_license | alexanu/Python_Trading_Snippets | 74515a40dc63ba50d95bd50330ed05d59b5dc837 | 85969e681b9c74e24e60cc524a952f9585ea9ce9 | refs/heads/main | 2023-06-25T03:27:45.813987 | 2023-06-09T16:09:43 | 2023-06-09T16:09:43 | 197,401,560 | 18 | 17 | null | 2023-02-08T22:25:25 | 2019-07-17T14:05:32 | Jupyter Notebook | UTF-8 | Python | false | false | 2,125 | py | from .alphavantage import AlphaVantage as av
class CryptoCurrencies(av):
"""This class implements all the crypto currencies API calls
Prices and volumes are quoted in both the market-specific currency and USD.
    All the functions follow the same call structure:
Keyword Arguments:
symbol: The digital/crypto currency of your choice.
It can be any of the currencies in the digital currency list. For example symbol=BTC.
market: The exchange market of your choice.
        It can be any of the markets in the market list. For example: market=CNY.
"""
@av._output_format
@av._call_api_on_func
def get_digital_currency_daily(self, symbol, market):
""" Returns the daily historical time series for a digital currency
(e.g., BTC) traded on a specific market (e.g., CNY/Chinese Yuan),
refreshed daily at midnight (UTC).
"""
_FUNCTION_KEY = 'DIGITAL_CURRENCY_DAILY'
return _FUNCTION_KEY, 'Time Series (Digital Currency Daily)', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_digital_currency_weekly(self, symbol, market):
""" Returns the weekly historical time series for a digital currency
(e.g., BTC) traded on a specific market (e.g., CNY/Chinese Yuan),
refreshed daily at midnight (UTC). Prices and volumes are quoted in
both the market-specific currency and USD.
"""
_FUNCTION_KEY = 'DIGITAL_CURRENCY_WEEKLY'
return _FUNCTION_KEY, 'Time Series (Digital Currency Weekly)', 'Meta Data'
@av._output_format
@av._call_api_on_func
def get_digital_currency_monthly(self, symbol, market):
""" Returns the monthly historical time series for a digital currency
(e.g., BTC) traded on a specific market (e.g., CNY/Chinese Yuan),
refreshed daily at midnight (UTC). Prices and volumes are quoted in
both the market-specific currency and USD.
"""
_FUNCTION_KEY = 'DIGITAL_CURRENCY_MONTHLY'
return _FUNCTION_KEY, 'Time Series (Digital Currency Monthly)', 'Meta Data'
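# Illustrative usage sketch (not part of the library file; the "demo" key and
# the unpacking below are assumptions about the caller's alpha_vantage setup):
#
#   from alpha_vantage.cryptocurrencies import CryptoCurrencies
#   cc = CryptoCurrencies(key='demo', output_format='pandas')
#   data, meta_data = cc.get_digital_currency_daily(symbol='BTC', market='CNY')
#   print(data.head())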
| [
"[email protected]"
]
| |
ba349bf428a3ad2f98c1478dcd08138dde07d944 | 76e62ddbfdfba19c80b37e855a4df67672ef0808 | /BIZa/2014/Novikova_J_V/Задача №4. Вариант 34.py | f7c4c6405a63589003c58083cc5a3d9c6a220461 | [
"Apache-2.0"
]
| permissive | stasvorosh/pythonintask | 9d30f3cd492e89783b7221402375c1ebe4690baa | 8169ed26510022fe0d589f4013f11749131957df | refs/heads/master | 2021-01-17T16:49:32.778063 | 2016-10-10T14:08:04 | 2016-10-10T14:08:04 | 52,255,539 | 6 | 0 | null | 2016-02-22T07:33:16 | 2016-02-22T07:33:15 | null | UTF-8 | Python | false | false | 1,402 | py | # Task No. 4. Variant 34.
# Write a program that prints the name under which Maria Luisa Polyakova-Baydarova is better known. Additionally, print that person's field of interest, place of birth, and years of birth and death (if deceased), and compute the age at the present moment (or at the moment of death). All required data must be stored in variables. After printing the information, the program must wait for the user to press Enter before exiting.
# Novikova J. V.
# 26.04.2016
print('Maria Luisa Polyakova-Baydarova, French actress, better known as Marina Vlady')
place = 'Clichy, France'
born = '1938'
age = '78'
interes = 'Cinema'
print('Place of birth: ' + place)
print('Year of birth: ' + str(born))
print('Current age: ' + str(age))
print('Field of interest: ' + interes)
input('Press Enter to exit')
"[email protected]"
]
| |
8d9f6e547d53743658efe0b029a51b27da1edc5a | e45b050c8490afeff9b0a7212d46aee6f3227612 | /docs/conf.py | db378e42c4a4b8477c2be5ecd0f6da26ef947925 | [
"MIT"
]
| permissive | luzfcb/django-allauth | ec9c089b0e7fe12f903a258e1ebb2e842646c47c | 84e3ba0cc41beebe69bcf380554eb2e96ec2cfad | refs/heads/master | 2021-01-17T21:26:00.998997 | 2013-11-27T16:41:48 | 2013-11-27T16:41:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,802 | py | # -*- coding: utf-8 -*-
#
# django-allauth documentation build configuration file, created by
# sphinx-quickstart on Wed Jun 6 22:58:42 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-allauth'
copyright = u'2013, Raymond Penners'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.14.3-dev'
# The full version, including alpha/beta/rc tags.
release = '0.14.3-dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-allauthdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-allauth.tex', u'django-allauth Documentation',
u'Raymond Penners', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-allauth', u'django-allauth Documentation',
[u'Raymond Penners'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'django-allauth', u'django-allauth Documentation',
u'Raymond Penners', 'django-allauth', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| [
"[email protected]"
]
| |
d7b2079f01d6308c91b68f4e7309c6900690d40e | 8f8ac99fd3ed9ceb36778b404f6fdd0b6899d3f4 | /pyobjc-framework-Cocoa/PyObjCTest/test_nsdraggingitem.py | ed2c3d005e4b5cb0bdbcbe99f3b2c1d47e98bb2b | [
"MIT"
]
| permissive | strogo/pyobjc | ac4201c7742eb75348328eeecb7eedf4e3458de3 | 2579c5eaf44b0c5af77ee195c417d2c65e72dfda | refs/heads/master | 2023-07-13T00:41:56.448005 | 2021-08-24T06:42:53 | 2021-08-24T06:42:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 577 | py | import AppKit
from PyObjCTools.TestSupport import TestCase, min_os_level
class TestNSDraggingItem(TestCase):
@min_os_level("10.7")
def testConstants10_7(self):
self.assertIsInstance(AppKit.NSDraggingImageComponentIconKey, str)
self.assertIsInstance(AppKit.NSDraggingImageComponentLabelKey, str)
@min_os_level("10.7")
def testMethods10_7(self):
self.assertArgIsBlock(
AppKit.NSDraggingItem.setImageComponentsProvider_, 0, b"@"
)
self.assertResultIsBlock(AppKit.NSDraggingItem.imageComponentsProvider, b"@")
| [
"[email protected]"
]
| |
dabad33d58c5bdaefaa03a5af5b3b6a0977109c7 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03304/s545763766.py | 0659fb172e79807c2c54e0ef15d6b2bc8fc15172 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 105 | py | import decimal
n,m,d=map(int,input().split())
print(decimal.Decimal(((2**min(1,d))*(n-d)*(m-1))/(n**2)))
| [
"[email protected]"
]
| |
a025c41a715c278a37e8811487827c599e634f77 | 1f0b38e455ec949eb1285437373c496f46900955 | /Figures/early_SMBH_growth/redshift_vs_Mbh_growth_20200323.py | 006f80c26ed4dda1e3927ed1083fee38aa70d5f6 | []
| no_license | d80b2t/JWST_Cycle1 | 87fef5f58fca242e9df7717a609120be1cf01af0 | e6e7618640d4b35cff528304e475fed1ee0231c5 | refs/heads/master | 2021-05-12T19:43:28.468189 | 2020-03-23T17:42:21 | 2020-03-23T17:42:21 | 117,101,074 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,109 | py | '''
WISE detections and colors of Very High redshift quasars
'''
import math
import numpy as np
from astropy.io import fits
from astropy.io import ascii
from astropy.table import Table
import astropy.units as u
import matplotlib
import matplotlib.pyplot as plt
from matplotlib import colors as mcolors
from matplotlib import gridspec
from astropy.cosmology import FlatLambdaCDM
from astropy.cosmology import z_at_value
## Setting up the cosmology...
cosmo = FlatLambdaCDM(H0=68.0, Om0=0.31) #Banados thesis
#ages = np.array([13, 10, 8, 6, 5, 4, 3, 2, 1.5, 1.2, 1, 0.8, 0.70, 0.50, 0.25, 0.10])*u.Gyr
ages = np.array([13, 10, 8, 6, 5, 4, 3, 2, 1.5, 1.25, 0.75, 0.50, 0.25, 0.10])*u.Gyr
ageticks = [z_at_value(cosmo.age, age) for age in ages]
redshifts = np.array([6, 7, 8, 9, 10, 12, 15, 20])
redshiftticks = [cosmo.age(redshift).value for redshift in redshifts]
##
## READ-IN THE D A T A F I L E (S)
## Inayoshi, Visbal, Haiman Annu. Rev. Astron. Astrophys. 2019. 58:1–79
filename = 'Inayoshi_2019_ARAA_203quasars.dat'
VHzQs = ascii.read(filename, delimiter=r'\s', guess=False)
z_VHzQs = VHzQs['redshift']
log_MBH_VHzQs = np.log10(VHzQs['Mbh'])
age_VHzQs = cosmo.age(z_VHzQs).value
## Trakhtenbrot et al. (2011) z=4.8 objects
## name, redshift, L_bol, log_MBH, l_Edd
## J143+0635 4.850 46.98 8.99 -0.19
path = '/cos_pc19a_npr/data/highest_z_QSOs/Trakhtenbrot2011/'
filename = 'Table2.dat'
Trak11 = ascii.read(path+filename, delimiter=r'\s', guess=False)
z_Trak11 = Trak11['redshift']
log_MBH_Trak11 = Trak11['log_MBH']
age_Trak11 = cosmo.age(z_Trak11).value
## The 3 monsters in Banados et al, 2018, Nature, 553, 473
path = '/cos_pc19a_npr/data/highest_z_QSOs/Mbh_values/'
filename = 'Banados_2018_Fig2.dat'
Banados = ascii.read(path+filename, delimiter=r'\s', guess=False)
log_MBH_Bana = np.log10(Banados['MBH'])
z_Bana = Banados['redshift']
filename = 'Gallerani_2017.dat'
Gallerani = ascii.read(path+filename, delimiter=r'\s', guess=False)
log_MBH_Gall = np.log10(Gallerani['M_BH'])
z_Gall = Gallerani['redshift']
filename = 'Top10.dat'
Top10 = ascii.read(path+filename, delimiter=r'\s', guess=False)
log_MBH_Top10 = np.log10(Top10['MBH'])
z_Top10 = Top10['redshift']
##
## Salpeter timescales,
## timescale for BH growth, based upon the Eddington limit: a growing
## black hole heats accretion material, which glows and is subject to
## the luminosity limit. The timescale is 5e7 years.
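## In code terms this is e-folding growth at the Eddington limit:
##     M(t) = M_seed * exp(t / t_Salpeter),  t_Salpeter = 4.5e7 yr * (eta/0.1) / (L/L_Edd)
## which is exactly what the two loops below evaluate.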
## Start and end redshift.
zrange = np.arange(3, 35., 0.02)
ee = [cosmo.age(zz).value for zz in zrange]
t_bana = np.array(ee)*1e9
## Some physical values
Ledd = 1.0
## Hold M_seed constant, vary eta
M_seed = 1000.0
eta_variable = [0.10, 0.11, 0.125, 0.14, 0.15]
eta_label = ['0.10', '0.11', '0.125', '0.14', '0.15']
s = (len(t_bana),len(eta_variable))
M_BH_grower_eta = np.zeros(s)
for ii in range(len(eta_variable)):
t_salpeter = 4.5e7 * (eta_variable[ii]/0.1) * (Ledd**(-1))
M_BH_grower_eta[:,ii] = (np.exp(t_bana/t_salpeter))*M_seed
## Hold eta constant, vary M_seed
## bit more interesting since want to see that range of MBH_seeds
## that are viable
eta = 0.10
t_salpeter = 4.5e7*(eta/0.1)*(Ledd**(-1))
Mseed_variable = [1.0, 10., 100.0, 1000, 10000.]
s = (len(t_bana),len(Mseed_variable))
M_BH_grower_MBHseed = np.zeros(s)
for jj in range(len(Mseed_variable)):
M_BH_grower_MBHseed[:,jj] = (np.exp(t_bana/t_salpeter)) * (Mseed_variable[jj])
##
## Making the plot
##
fig, ax1 = plt.subplots(figsize=(14.0, 10.0))
## My fave new line ;-)
plt.style.use('dark_background')
plt.rcParams.update({'font.size': 14})
matplotlib.rc('text', usetex=True)
matplotlib.rcParams['lines.linewidth'] = 22 #does this do anything??!!
## Adjusting the Whitespace for the plots
left = 0.14 # the left side of the subplots of the figure
right = 0.94 # the right side of the subplots of the figure
bottom = 0.16 # the bottom of the subplots of the figure
top = 0.88 # the top of the subplots of the figure
wspace = 0.26 # the amount of width reserved for blank space between subplots
hspace = 0.06 # the amount of height reserved for white space between subplots
plt.subplots_adjust(left=left, bottom=bottom, right=right, top=top, wspace=wspace, hspace=hspace)
## Some NPR defaults
ls = 'solid'
lw = 1.0
ms_large = 250
ms = ms_large/3.
alpha = 1.0
fontsize = 36
labelsize = fontsize
tickwidth = 2.0
linewidth = 2.4
ticklength = 6.0
ticklabelsize = labelsize
majorticklength = 12
minorticklength = 6
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
minorLocator = MultipleLocator(5)
## define the colormap
cmap = plt.cm.jet ## great with 'dark_background'
cmap = plt.cm.viridis
## AGE RANGE
xmin = 0.20 # Gyr
xmax = 1.4
## REDSHIFT RANGE
zmin = 4.8 ## 5.8 ## 3.0 ## 4.3
zmax = 38.0 ## 16.0 ## z=45 in Banados 2018
## Mass axis range
ymin = 0.5 # 6.5 2.8 good if redshift is z= 45.
ymax = 10.5 # 10.5
c = VHzQs['M1450']
#cmap = plt.cm.jet ## great with 'dark_background'
cmap = plt.cm.viridis_r
## Plotting the quasars...
ax1.scatter(z_VHzQs, log_MBH_VHzQs, c=c, cmap=cmap, marker="P", s=(ms_large*1.2), label="$z>$6 QSOs", zorder=12)
#ax1.scatter(z_Top10, log_MBH_Top10, c='b', marker="s", s=ms_large, label="Highest-$z$/most massive", zorder=10)
#ax1.scatter(z_Trak11, log_MBH_Trak11, c='k', marker="d", s=ms_large, label="Trakhtenbrot (2011)", zorder=10)
#ax1.scatter(z_Trak11, log_MBH_Trak11, c='silver', marker="d", s=ms_large, label="Trakhtenbrot+ (2011)", zorder=10)
##
## BH Growth tracks..
##
## Varying e t a
#for ii in range(len(eta_variable)):
#ax1.plot(zrange, (np.log10( M_BH_grower_eta[:,ii] )), label =eta_label[ii], linewidth=8, linestyle='--', color='crimson')
## Varying seed BH mass
for jj in range(len(Mseed_variable)):
print("Plotting Mseed_variable lines", jj)
    ax1.plot(zrange, np.log10(M_BH_grower_MBHseed[:, jj]),
             label='$M_{seed}=$' + str(Mseed_variable[jj]) + ' $M_{\odot}$',
             linewidth=8, linestyle='--')
## L E G E N D
ax1.legend(loc='upper right', fontsize=fontsize/1.3, frameon=True)
# Setting up the axes...
ax1.set_xlim((zmin, zmax))
ax1.set_ylim((ymin, ymax))
ax1.tick_params('x', direction='in', which='major', bottom='on', top='on', left='on', right='on', size=fontsize/1.6)
ax1.tick_params('x', direction='in', which='minor', bottom='on', top='on', left='on', right='on', size=fontsize/1.6)
ax1.tick_params('y', direction='in', which='major', bottom='on', top='on', left='on', right='on', size=fontsize/1.6)
ax1.tick_params('y', direction='in', which='minor', bottom='on', top='on', left='on', right='on', size=fontsize/1.6)
ax1.xaxis.set_minor_locator(minorLocator)
##
ax1.tick_params(axis='both', labelsize = fontsize/1.1)
ax1.set_xlabel('redshift, $z$', fontsize = fontsize)
ax1.set_ylabel('log (M$_{BM}$) / M$_{\odot}$)', fontsize = fontsize)
ax4 = ax1.twiny()
## If AGE, is the top x-axis
ax4.set_xticks(ageticks)
ax4.set_xticklabels(['{:g}'.format(age) for age in ages.value])
ax4.set_xlim(zmin, zmax) ## the co-ordinate system is in "redshift units"
ax4.set_xlabel('Time since Big Bang (Gyr)', fontsize=fontsize)
ax4.tick_params(axis='both', labelsize=fontsize/1.1)
ax4.xaxis.set_label_coords(0.50, 1.10)
## if REDSHIFT is the top x-axis
#ax4.set_xlim(xmin, xmax) ## The co-ordinate system is in "age units"
#ax4.set_xticks(redshiftticks)
#ax4.set_xticklabels(['{:g}'.format(redshifts) for redshifts in redshifts])
#ax4.tick_params(axis='both', labelsize=36)
#plt.show()
plt.savefig('redshift_vs_Mbh_growth_temp.png',format='png')
plt.close(fig)
| [
"[email protected]"
]
| |
fecce092dd36e224a01aab6a9e1d8b24d5bbf868 | 983ca9afc80dc1bd2cd25e81ec51de8c1fd39394 | /Unit5/5.2/googleSniff.py | 37fc74ef1da2a12b1df291bd03061d1e9e4ea3c1 | []
| no_license | mi1k7ea/Violent-Python | d5630a67cbdc218640d21f58e4081cd6530f32fe | c8048b04e02a6e91aed8e73af36e707b004b115c | refs/heads/master | 2022-11-25T23:06:05.138896 | 2020-08-02T15:03:32 | 2020-08-02T15:03:32 | 284,483,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 909 | py | #!/usr/bin/python
#coding=utf-8
import optparse
import re
from scapy.all import *
def findGoogle(pkt):
if pkt.haslayer(Raw):
payload = pkt.getlayer(Raw).load
if 'GET' in payload:
if 'google' in payload:
r = re.findall(r'(?i)\&q=(.*?)\&', payload)
if r:
search = r[0].split('&')[0]
search = search.replace('q=', '').replace('+', ' ').replace('%20', ' ')
print '[+] Searched For: ' + search
def main():
parser = optparse.OptionParser('[*]Usage: python googleSniff.py -i <interface>')
parser.add_option('-i', dest='interface', type='string', help='specify interface to listen on')
(options, args) = parser.parse_args()
    if options.interface is None:
print parser.usage
exit(0)
else:
conf.iface = options.interface
try:
print '[*] Starting Google Sniffer.'
sniff(filter='tcp port 80', prn=findGoogle)
except KeyboardInterrupt:
exit(0)
if __name__ == '__main__':
main() | [
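# Offline sanity check (illustrative only): the query-extraction logic above,
# run against a hand-crafted GET payload instead of live traffic.
def _demo():
    payload = 'GET /search?hl=en&q=violent+python&btnG=Search HTTP/1.1'
    r = re.findall(r'(?i)\&q=(.*?)\&', payload)
    print '[+] Extracted: ' + r[0].replace('+', ' ')   # -> violent python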
"[email protected]"
]
| |
f79e9655cd13395e6de47f2c80331663af24e8a8 | 96e507cf993e26ea9fdc8586073fb5822b9b5c26 | /ex_1.3_datatype_dictionary.py | 04a3c8058f2e27f2d8fcc3cf01ff3531ba2e92ad | []
| no_license | bhoj001/python_tutorial | eff4bd1becccc80950a3ebd55a1abf26985e9cd5 | de717e518ece9989a8ed90f346374dc6cfaeebfc | refs/heads/master | 2020-12-14T21:33:54.559542 | 2020-03-02T09:47:45 | 2020-03-02T09:47:45 | 234,869,534 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,427 | py | '''
author: bhoj bahadur karki
date: 2020-jan-19th
purpose : about dictionary in python
Dictionary: Python dictionary is an unordered collection of items.
While other compound data types have only value as an element, a dictionary has a key: value pair.
Dictionaries are optimized to retrieve values when the key is known.
PythonOrg def:
Dictionaries are sometimes found in other languages as “associative memories” or “associative arrays”.
Unlike sequences, which are indexed by a range of numbers, dictionaries are indexed by keys,
which can be any immutable type; strings and numbers can always be keys. Tuples can be
used as keys if they contain only strings, numbers, or tuples; if a tuple contains any
mutable object either directly or indirectly, it cannot be used as a key. You can’t use
lists as keys, since lists can be
modified in place using index assignments, slice assignments, or methods like append() and extend().
It is best to think of a dictionary as a set of key: value pairs, with the requirement
that the keys are unique (within one dictionary). A pair of braces creates an empty dictionary: {}.
Placing a comma-separated list of key:value pairs within the braces adds
initial key:value pairs to the dictionary; this is also the way dictionaries are written on output.
The main operations on a dictionary are storing a value with some key and extracting
the value given the key. It is also possible to delete a key:value pair with del.
If you store using a key that is already in use, the old value associated with that key is forgotten.
It is an error to extract a value using a non-existent key.
Note: dictionary has key,value
key can be integer, string, float but it has to be unique
>>> x = {2:30,3:2,2:40} # if we repeat a key, one item(here 2:30) is ignored
>>> x
{2: 40, 3: 2}
'''
# -------Creating dictionary------------
# using {}
tel = {'jack': 4098, 'sape': 4139}
tel['mike'] = 4127 # adding to dictionary
print("tel=",tel)
# using dict()
my_dict = dict({1:'apple', 2:'ball'})
print("my_dict=",my_dict)
# from sequence having each item as a pair
my_dict = dict([(1,'apple'), (2,'ball')])
print("my_dict=",my_dict)
# using dict() keyword
x= dict([('juli', 4139), ('max', 4127), ('jack', 4098)])
print("x=",x)
a = {x: x**2 for x in (2, 4, 6)}
print("a=",a)
# --------Accessing element in dictionary-------
# we use key to access element in dictionary e.g. dic[key]
print("juli =",x['juli'])
# ---------Changing element in dictionary-----
# we use equal sign with syntax: dict[key]=value
x['juli'] = 3
print("new x = ",x)
# -------deleting item in dictionary----------
# using dict.pop(key)
x.pop('juli') # this will remove key-value pair of juli(juli:3)
print("after pop x = ",x)
# using del dict[key]
del x['max']
print("after del x = ",x)
# using .clear() to clear all items
x.clear() # this will empty the dictionary
print("after clear x=",x)
# ----------Looping technique in dictionary---------
knights = {'ram': 'the pure', 'robin': 'the brave'}
for k, v in knights.items():
print(k, v)
# ------for sequence datatype like list, tuple, range-------
#------getting index value from sequence datatype like list, tuple, range-------
# use enumerate() function
# When looping through a sequence, the position index and corresponding value can
# be retrieved at the same time using the enumerate() function.
# i =index, v= value, in short form
for i, v in enumerate(['tic', 'tac', 'toe']):
print(i, v)
# -------combine two list and loop------------
# To loop over two or more sequences at the same time, the entries can be paired with
# the zip() function.
questions = ['name', 'quest', 'favorite color']
answers = ['Bhoj', 'to teach programming', 'blue']
for q, a in zip(questions, answers):
print('What is your {0}? It is {1}.'.format(q, a))
# -------- reversing a sequence datatype; range syntax: range(start, stop, step) --------
for item in reversed(range(2,10,2)):
print(item)
# ---------Loop via sorting an item------------
# To loop over a sequence in sorted order, use the sorted() function
# which returns a new sorted list while leaving the source unaltered.
basket = ['apple', 'orange', 'apple', 'pear', 'orange', 'banana']
# here set() function removes the duplicate items
for f in sorted(set(basket)):
print(f)
'''
Method Description
clear() Remove all items form the dictionary.
copy() Return a shallow copy of the dictionary.
fromkeys(seq[, v]) Return a new dictionary with keys from seq and value equal to v (defaults to None).
get(key[,d]) Return the value of key. If key does not exist, return d (defaults to None).
items() Return a new view of the dictionary's items (key, value).
keys() Return a new view of the dictionary's keys.
pop(key[,d]) Remove the item with key and return its value or d if key is not found.
If d is not provided and key is not found, raises KeyError.
popitem() Remove and return an arbitrary item (key, value). Raises KeyError if the dictionary
is empty.
setdefault(key[,d]) If key is in the dictionary, return its value. If not,
insert key with a value of d and return d (defaults to None).
update([other]) Update the dictionary with the key/value pairs from other,
overwriting existing keys.
values() Return a new view of the dictionary's values
''' | [
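# A short demonstration of the methods tabulated above (phone-book data is invented):
contacts = {'ram': 4098, 'sita': 4139}
print(contacts.get('ram'))             # 4098
print(contacts.get('hari', 'n/a'))     # 'n/a' -- missing key falls back to the default
contacts.setdefault('hari', 0)         # inserts 'hari': 0 only because the key was absent
contacts.update({'sita': 5000})        # overwrites the existing value for 'sita'
print(contacts.pop('ram'))             # 4098, and 'ram' is removed
print(dict.fromkeys(['a', 'b'], 0))    # {'a': 0, 'b': 0}
print(sorted(contacts.items()))        # [('hari', 0), ('sita', 5000)]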
"[email protected]"
]
| |
c0ce587c985fdba86762d152de058192b4c8fc8a | 43fd8b12dc1b6a2fc7cf4d9b8a80d3f1ae0fac66 | /Test/others/requeset用法.py | 47a0ce9e6eb99ef82864c5610adc2e675068439b | []
| no_license | gxiang666/python_file | e707f829b2c35e6126bea79e299333faabe76b19 | 2ee0f52d53892d193dc83c10564f7326e0bad0da | refs/heads/master | 2022-12-07T04:16:29.166707 | 2019-10-25T02:59:26 | 2019-10-25T02:59:26 | 139,252,161 | 1 | 0 | null | 2022-11-22T02:38:40 | 2018-06-30T13:35:14 | Python | UTF-8 | Python | false | false | 140 | py | import requests
r = requests.get("https://www.bilibili.com/video/av9784617?p=5")
print(r.status_code)
r.encoding = "utf-8"
print(r.text)  # r.text is decoded using the r.encoding set above; r.content is the raw bytes
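# A slightly more defensive variant (sketch; the timeout and User-Agent values
# below are arbitrary illustrative choices, not part of the original snippet):
r2 = requests.get(
    "https://www.bilibili.com/video/av9784617?p=5",
    headers={"User-Agent": "Mozilla/5.0"},
    timeout=10,
)
r2.raise_for_status()
print(len(r2.text))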
| [
"[email protected]"
]
| |
7371e45ddb1c2902e7bb85f6584abc85dc138382 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03469/s884542061.py | fe4c56bfa8509ab85d7064f736f4b45c11743730 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 106 | py | s = list(map(str,input().split("/")))
if s[0] == "2017":
s[0] = "2018"
print(s[0]+"/"+s[1]+"/"+s[2]) | [
"[email protected]"
]
| |
adac29ebb4dc4dcbd9bb458b8f74a2dd3f338700 | ca8dc4d5b6168648cf8a842fc27191fec3597a09 | /venv/lib/python3.6/site-packages/statsmodels/tools/tests/test_rootfinding.py | 4b840d33271b1842c9f9faf34a5de074491539f9 | [
"MIT"
]
| permissive | iefuzzer/vnpy_crypto | 293a7eeceec18b934680dafc37381d1f5726dc89 | d7eed63cd39b1639058474cb724a8f64adbf6f97 | refs/heads/master | 2020-03-26T20:13:38.780107 | 2018-09-10T06:09:16 | 2018-09-10T06:09:16 | 145,311,871 | 3 | 0 | MIT | 2018-09-10T06:09:18 | 2018-08-19T14:48:32 | Python | UTF-8 | Python | false | false | 2,957 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Mar 23 13:34:19 2013
Author: Josef Perktold
"""
import numpy as np
from statsmodels.tools.rootfinding import brentq_expanding
from numpy.testing import (assert_allclose, assert_equal, assert_raises,
assert_array_less)
def func(x, a):
f = (x - a)**3
return f
def func_nan(x, a, b):
x = np.atleast_1d(x)
f = (x - 1.*a)**3
f[x < b] = np.nan
return f
def funcn(x, a):
f = -(x - a)**3
return f
def test_brentq_expanding():
cases = [
(0, {}),
(50, {}),
(-50, {}),
(500000, dict(low=10000)),
(-50000, dict(upp=-1000)),
(500000, dict(low=300000, upp=700000)),
(-50000, dict(low= -70000, upp=-1000))
]
funcs = [(func, None),
(func, True),
(funcn, None),
(funcn, False)]
for f, inc in funcs:
for a, kwds in cases:
kw = {'increasing':inc}
kw.update(kwds)
res = brentq_expanding(f, args=(a,), **kwds)
#print '%10d'%a, ['dec', 'inc'][f is func], res - a
assert_allclose(res, a, rtol=1e-5)
# wrong sign for start bounds
# doesn't raise yet during development TODO: activate this
# it kind of works in some cases, but not correctly or in a useful way
#assert_raises(ValueError, brentq_expanding, func, args=(-500,), start_upp=-1000)
#assert_raises(ValueError, brentq_expanding, func, args=(500,), start_low=1000)
# low upp given, but doesn't bound root, leave brentq exception
# ValueError: f(a) and f(b) must have different signs
assert_raises(ValueError, brentq_expanding, funcn, args=(-50000,), low= -40000, upp=-10000)
# max_it too low to find root bounds
# ValueError: f(a) and f(b) must have different signs
assert_raises(ValueError, brentq_expanding, func, args=(-50000,), max_it=2)
# maxiter_bq too low
# RuntimeError: Failed to converge after 3 iterations.
assert_raises(RuntimeError, brentq_expanding, func, args=(-50000,), maxiter_bq=3)
    # cannot determine whether increasing; all 4 low trial points return nan
assert_raises(ValueError, brentq_expanding, func_nan, args=(-20, 0.6))
# test for full_output
a = 500
val, info = brentq_expanding(func, args=(a,), full_output=True)
assert_allclose(val, a, rtol=1e-5)
info1 = {'iterations': 63, 'start_bounds': (-1, 1),
'brentq_bounds': (100, 1000), 'flag': 'converged',
'function_calls': 64, 'iterations_expand': 3, 'converged': True}
# adjustments for scipy 0.8.0 with changed convergence criteria
assert_array_less(info.__dict__['iterations'], 70)
assert_array_less(info.__dict__['function_calls'], 70)
for k in info1:
if k in ['iterations', 'function_calls']:
continue
assert_equal(info1[k], info.__dict__[k])
assert_allclose(info.root, a, rtol=1e-5)
| [
"[email protected]"
]
| |
becb2f371e27eec0814fc314ec1629220a2c31c2 | ca12625e6d2f3581793694cfc40445a85fc4770b | /bitmex_websocket/_bitmex_websocket.py | 4a5d778dce46cc853d1c0175e929bb37a169943d | [
"MIT"
]
| permissive | kelvinxue/bitmex-websocket | ad10c63ed0fb341f23ed9d9511cc235eb8a5f1b1 | 773531943abc71b0e10b2dc5feec58152796c234 | refs/heads/master | 2020-03-21T04:36:10.788915 | 2018-06-21T04:17:41 | 2018-06-21T04:17:41 | 138,117,395 | 0 | 0 | MIT | 2018-06-21T04:02:13 | 2018-06-21T04:02:13 | null | UTF-8 | Python | false | false | 4,072 | py | from bitmex_websocket.auth.api_key_auth import generate_nonce,\
generate_signature
from bitmex_websocket.settings import settings
from pyee import EventEmitter
from urllib.parse import urlparse
from websocket import WebSocketApp
import alog
import json
import ssl
import time
__all__ = ['BitMEXWebsocket']
class BitMEXWebsocketConnectionError(Exception):
pass
class BitMEXWebsocket(EventEmitter, WebSocketApp):
def __init__(self, should_auth=False, heartbeat=True, ping_interval=10,
ping_timeout=9):
self.ping_timeout = ping_timeout
self.ping_interval = ping_interval
self.should_auth = should_auth
self.heartbeat = heartbeat
self.channels = []
self.reconnect_count = 0
EventEmitter.__init__(self)
WebSocketApp.__init__(
self,
url=self.gen_url(),
header=self.header(),
on_message=self.on_message,
on_close=self.on_close,
on_open=self.on_open,
on_error=self.on_error,
on_pong=self.on_pong
)
self.on('subscribe', self.on_subscribe)
def gen_url(self):
base_url = settings.BASE_URL
url_parts = list(urlparse(base_url))
query_string = ''
if self.heartbeat:
query_string = '?heartbeat=true'
url = "wss://{}/realtime{}".format(url_parts[1], query_string)
return url
def run_forever(self, **kwargs):
"""Connect to the websocket in a thread."""
# setup websocket.run_forever arguments
ws_run_args = {
'sslopt': {"cert_reqs": ssl.CERT_NONE}
}
if self.heartbeat:
ws_run_args['ping_timeout'] = self.ping_timeout
ws_run_args['ping_interval'] = self.ping_interval
alog.debug(ws_run_args)
super().run_forever(**ws_run_args)
def on_pong(self, ws, message):
timestamp = float(time.time() * 1000)
latency = timestamp - (self.last_ping_tm * 1000)
self.emit('latency', latency)
def subscribe(self, channel: str):
subscription_msg = {"op": "subscribe", "args": [channel]}
self._send_message(subscription_msg)
def _send_message(self, message):
self.send(json.dumps(message))
def is_connected(self):
return self.sock.connected
@staticmethod
def on_subscribe(message):
if message['success']:
alog.debug("Subscribed to %s." % message['subscribe'])
else:
            raise Exception('Unable to subscribe.')
def on_message(self, ws, message):
"""Handler for parsing WS messages."""
message = json.loads(message)
if 'error' in message:
self.on_error(ws, message['error'])
action = message['action'] if 'action' in message else None
if action:
self.emit('action', message)
elif 'subscribe' in message:
self.emit('subscribe', message)
elif 'status' in message:
self.emit('status', message)
def header(self):
"""Return auth headers. Will use API Keys if present in settings."""
auth_header = []
if self.should_auth:
alog.info("Authenticating with API Key.")
# To auth to the WS using an API key, we generate a signature
# of a nonce and the WS API endpoint.
alog.debug(settings.BITMEX_API_KEY)
nonce = generate_nonce()
api_signature = generate_signature(
settings.BITMEX_API_SECRET, 'GET', '/realtime', nonce, '')
            auth_header = [
"api-nonce: " + str(nonce),
"api-signature: " + api_signature,
"api-key:" + settings.BITMEX_API_KEY
]
return auth_header
def on_open(self, ws):
alog.debug("Websocket Opened.")
self.emit('open')
def on_close(self, ws):
alog.info('Websocket Closed')
def on_error(self, ws, error):
raise BitMEXWebsocketConnectionError(error)
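# Minimal usage sketch (illustrative; the 'instrument' channel and the event
# wiring below are assumptions about the public BitMEX stream, not part of
# this file):
if __name__ == '__main__':
    ws = BitMEXWebsocket(should_auth=False)
    ws.on('open', lambda: ws.subscribe('instrument'))
    ws.on('action', lambda message: alog.info(message))
    ws.run_forever()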
| [
"[email protected]"
]
| |
00164b917dd18b0a31f8dd2c58136718f475bcae | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/rna-transcription/73f4d972b2c74182aa5cae54291062e3.py | 78dc6c4a79f5f91885ccb23cd500e3eb79dd4a86 | []
| no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 201 | py | conversion_dict = {
'G' : 'C',
'C' : 'G',
'T' : 'A',
'A' : 'U'
}
def to_rna(dna):
converted = ""
for nucleotide in dna:
converted += conversion_dict[nucleotide]
return converted
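# Quick self-check (illustrative): each DNA nucleotide maps to its RNA complement.
if __name__ == '__main__':
    assert to_rna('GCTA') == 'CGAU'
    print(to_rna('ACGTGGTCTTAA'))  # -> UGCACCAGAAUU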
| [
"[email protected]"
]
| |
b454d8b153e5548dd92f8da3f9c6ae1b1b9b1b0c | 4ae7e4805e7b9ff0d949da276f59ec63a10b9fbb | /custom_stock_shipwire/__manifest__.py | ad304ece5ecbf8ff7908d58d65f2babd395c68b0 | []
| no_license | h3llopy/sasmar-addons12 | c94acb1994f0e17f245f0ff6b14d2d21b939c314 | 51a3ae074158fbc695711438888a5ec6c982a2fa | refs/heads/master | 2022-04-07T11:38:24.915350 | 2020-03-03T05:44:42 | 2020-03-03T05:44:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 514 | py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': "Stock Configuration Customization For Shipwire Shipping",
    'description': "Shipwire installation option in the Stock Configuration screen",
'author': "BrowseInfo",
'website': "https://www.brwoseinfo.com",
'category': 'Technical Settings',
'version': '1.0',
'depends': ['delivery', 'mail', 'stock'],
'data': [
'views/stock_config.xml'
],
    'auto_install': True,
}
| [
"[email protected]"
]
| |
0cd10debc2702f4d604aafa5725e36ac4b73485f | 3dc3bbe607ab7b583eb52dbaae86636eb642960a | /tools/data/multisports/format_det_result.py | 84fd78811ee3267ab3beebc4ba497895779781a8 | [
"Apache-2.0"
]
| permissive | open-mmlab/mmaction2 | 659c36c6083fd3d9d072e074a8d4b3a50342b9bd | 582b78fd6c3240500d5cacd292339d7d1ddbb056 | refs/heads/main | 2023-08-28T18:14:50.423980 | 2023-08-10T09:20:06 | 2023-08-10T09:20:06 | 278,810,244 | 3,498 | 1,028 | Apache-2.0 | 2023-09-07T06:50:44 | 2020-07-11T07:19:10 | Python | UTF-8 | Python | false | false | 2,169 | py | # Copyright (c) OpenMMLab. All rights reserved.
from argparse import ArgumentParser
import numpy as np
from mmengine import dump, load
from rich.progress import track
from mmaction.evaluation import link_tubes
def parse_args():
parser = ArgumentParser()
    parser.add_argument('test-result', help='path of dumped results')
parser.add_argument(
'--anno-path',
default='data/multisports/videos/trainval/multisports_GT.pkl')
parser.add_argument(
'--frm_out_path',
default=None,
help='frame-level detection results output path')
parser.add_argument(
'--tube_out_path',
default=None,
help='tube-level detection results output path')
args = parser.parse_args()
if not args.frm_out_path:
args.frm_out_path = args.test_result[:-4] + '-formated.pkl'
if not args.tube_out_path:
args.tube_out_path = args.test_result[:-4] + '_vid_dets.pkl'
return args
def format_det_result():
"""convert test results to specified format in MultiSports competition."""
test_results = load(args.test_result)
annos = load(args.anno_path)
test_videos = annos['test_videos'][0]
resolutions = annos['resolution']
frm_dets = []
    for pred in track(test_results, description='formatting...'):
video_key = pred['video_id'].split('.mp4')[0]
frm_num = pred['timestamp']
bboxes = pred['pred_instances']['bboxes']
cls_scores = pred['pred_instances']['scores']
for bbox, cls_score in zip(bboxes, cls_scores):
video_idx = test_videos.index(video_key)
pred_label = np.argmax(cls_score)
score = cls_score[pred_label]
h, w = resolutions[video_key]
bbox *= np.array([w, h, w, h])
instance_result = np.array(
[video_idx, frm_num, pred_label, score, *bbox])
frm_dets.append(instance_result)
frm_dets = np.array(frm_dets)
video_tubes = link_tubes(annos, frm_dets, K=1)
dump(frm_dets, args.frm_out_path)
dump(video_tubes, args.tube_out_path)
if __name__ == '__main__':
args = parse_args()
format_det_result()
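# Illustrative invocation (a sketch; the input path is an assumption):
#
#   python format_det_result.py work_dirs/test_results.pkl \
#       --anno-path data/multisports/videos/trainval/multisports_GT.pkl
#
# By default this writes frame-level detections to
# work_dirs/test_results-formated.pkl and linked tubes to
# work_dirs/test_results_vid_dets.pkl.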
| [
"[email protected]"
]
| |
ce9d8dab3c03c95c47082aca01989154eb9e0553 | 7890d130081829a7c41302066dc1934badb5a474 | /students/lerastromtsova/3/constants.py | f4c293477a6f15e7ba48515caff4b96e7e39d337 | [
"MIT"
]
| permissive | sobolevn/itmo-2019 | 3a05592ff8e2e62c1cadfb361914db42f0d39733 | cf39721874edf52deebc7f72f53455f69317f84a | refs/heads/master | 2020-07-21T06:23:08.581693 | 2020-01-24T12:49:59 | 2020-01-24T12:49:59 | 206,768,545 | 4 | 17 | MIT | 2020-04-12T12:52:50 | 2019-09-06T10:14:25 | Python | UTF-8 | Python | false | false | 557 | py | # -*- coding: utf-8 -*-
from contextlib import contextmanager
from datetime import datetime
import pytest
TEST_DIR = 'test_dirs'
FORMAT_CONSTANT = '{0}/{1}'
SCOPE = 'function'
FOLDER = 'folder'
FILE = 'file'
FOLDERS = 'folders'
FILES = 'files'
FILE_NOT_FOUND = pytest.raises(FileNotFoundError)
FILE_EXISTS = pytest.raises(FileExistsError)
NOW = datetime.now()
TEST_DATE = NOW.replace(year=NOW.year - 3).strftime('%d.%m.%Y %H:%M:%S')
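# TEST_DATE is "now" shifted back exactly three years, rendered as
# '%d.%m.%Y %H:%M:%S'. Note that NOW.replace(year=NOW.year - 3) raises
# ValueError if the suite happens to run on February 29.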
@contextmanager
def does_not_raise():
"""Empty function that is used to show there is no Exception."""
yield
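# Illustrative use of `does_not_raise` with the pytest.raises constants above
# (a sketch, not part of the original module):
#
#   @pytest.mark.parametrize('divisor, expectation', [
#       (1, does_not_raise()),
#       (0, pytest.raises(ZeroDivisionError)),
#   ])
#   def test_division(divisor, expectation):
#       with expectation:
#           assert (6 / divisor) is not None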
| [
"[email protected]"
]
| |
29c5b059d58988be82fa3c896883f93cde3c55ff | 9cf434b6ee59ab22496ee031fb4ab145bbaff1a2 | /tranque_v1.8.4_source/backend/src/alerts/tests/modules/ef/m2/escenarios_falla/test_eventos_problemas_instrumentacion.py | d1b22280b70a4619588a67b211e25032b7883730 | []
| no_license | oliverhernandezmoreno/SourcesOH | f2ff1a5e3377f0ac1fb8b3153d99d0ee703700b7 | 5d9ca5ab1caceafd4d11207139c9e56210156ef8 | refs/heads/master | 2023-01-05T02:51:25.172103 | 2020-08-27T14:39:34 | 2020-08-27T14:39:34 | 64,422,812 | 0 | 1 | null | 2022-12-30T17:25:10 | 2016-07-28T19:33:44 | JavaScript | UTF-8 | Python | false | false | 1,807 | py | from alerts.tests.modules.ef.ef_controller_base import EFControllerBase
from targets.models import Timeseries
class ProblemasInstrumentacionTestCase(EFControllerBase.TestCase):
def setUp(self):
super().setUp()
target = self.target_object
controller = '.ef.m2.escenarios_falla.eventos_problemas_instrumentacion.'
        piezometer = self.piezometers[0]
        accelerograph = self.accelerographs[0]
        flowmeter = self.flowmeters[0]
        turbidimeter = self.turbidimeters[0]
        self.base_name = '.ef-mvp.m2.failure_scenarios.fi-01.'
        self.suffixes = ["A1", "A2", "A3"]
        input_independent_ts = [[piezometer, self.suffixes],
                                [accelerograph, self.suffixes],
                                [flowmeter, self.suffixes],
                                [turbidimeter, self.suffixes]]
self.modules_hints = [(
f's({instrument.hardware_id}){controller}{suffix}')
for instrument, suffixes in input_independent_ts for suffix in suffixes]
ts = []
for instrument, suffixes in input_independent_ts:
for suffix in suffixes:
self.canonical_name = f'{target.canonical_name}.s-{instrument.hardware_id}{self.base_name}{suffix}'
ts.append((
Timeseries.objects.create(
target=target,
name=suffix,
canonical_name=self.canonical_name,
data_source=instrument,
type=Timeseries.TimeseriesType.TEST
),
suffix
)
)
self.timeseries = ts
self.independent = ts
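        # The canonical names built above follow the pattern
        # '<target>.s-<hardware_id>.ef-mvp.m2.failure_scenarios.fi-01.<suffix>',
        # yielding one test timeseries per (instrument, suffix) pair.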
| [
"[email protected]"
]
| |
c66332176917e79373d21e4ec4db1bc8890df8e4 | fc1141aabffe60455898b014fd8b4a2e8307ce85 | /chapter16_exporting_data/image_exporter.py | 1fdef438720f66da618ab6149297ce98a7ff80e9 | []
| no_license | Karagul/reportlabbookcode | b5bff1609d62fe2bcfb17bfd7b65777121ac175c | e271348d5562f4842b9d1628ef917539a8ebcd5d | refs/heads/master | 2020-09-21T14:58:43.427964 | 2018-12-19T17:40:46 | 2018-12-19T17:40:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 468 | py | # image_exporter.py
import os
import subprocess
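# Note: this script shells out to the `pdfimages` CLI (shipped with
# poppler-utils), which must be available on PATH.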
def image_exporter(pdf_path, output_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
cmd = ['pdfimages', '-all', pdf_path,
'{}/prefix'.format(output_dir)]
subprocess.call(cmd)
print('Images extracted:')
print(os.listdir(output_dir))
if __name__ == '__main__':
pdf_path = 'reportlab-sample.pdf'
image_exporter(pdf_path, output_dir='images') | [
"[email protected]"
]
| |
6a395b495a62beb5c2164f0226f0f60938285b99 | 0cf9bb9c50c6efc1bc4a7923f42f6fad79039598 | /Homeworks/HW 09_ Catching Hackers Starter Code/testbadlogindetector.py | 6306b04d23901933d31be7a09293687dcd663448 | []
| no_license | AlbMej/CSE-2050-Data-Structures-and-Object-Oriented-Design | c950bada185823c70370522e0735533b41bd726b | bfbe91d698e650d78c20fd535c45108a8dba1030 | refs/heads/master | 2020-04-25T13:20:57.537243 | 2019-03-12T19:54:04 | 2019-03-12T19:54:04 | 172,806,267 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,275 | py | import unittest
from badlogindetector import BadLoginDetector
from logentry import LogEntry
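# These tests assume LogEntry.fromstring parses entries of the form
# '[timestamp][ip][SUCCESS|FAIL]', matching the fixtures built below.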
class TestBadLoginDetector(unittest.TestCase):
def testinit(self):
BadLoginDetector(3,10)
BadLoginDetector(30,10000)
def testprocess_all_success(self):
log = ['[%d][1.1.1.1][SUCCESS]' % i for i in range(1000)]
d = BadLoginDetector(3,1000)
for e in log:
newentry = LogEntry.fromstring(e)
self.assertTrue(d.process(newentry))
def testprocess_somefails(self):
log = ['[%d][1.1.1.1][SUCCESS]' % i for i in range(1000)]
log[100] = '[100][2.2.2.2][FAIL]'
log[200] = '[200][2.2.2.2][FAIL]'
log[300] = '[300][2.2.2.2][FAIL]'
d = BadLoginDetector(3,1000)
for e in log:
newentry = LogEntry.fromstring(e)
if newentry.time == 300:
self.assertFalse(d.process(newentry))
else:
self.assertTrue(d.process(newentry))
def testprocess_fails_far_apart(self):
log = ['[%d][1.1.1.1][SUCCESS]' % i for i in range(1000)]
log[100] = '[100][2.2.2.2][FAIL]'
log[200] = '[200][2.2.2.2][FAIL]'
log[300] = '[300][2.2.2.2][FAIL]'
d = BadLoginDetector(3,200)
for e in log:
newentry = LogEntry.fromstring(e)
self.assertTrue(d.process(newentry))
def testprocess_allfails_far_apart(self):
log = ['[%d][1.1.1.1][FAIL]' % i for i in range(1000)]
d = BadLoginDetector(3,2)
for e in log:
newentry = LogEntry.fromstring(e)
self.assertTrue(d.process(newentry))
def testreport_onefail(self):
log = ['[%d][1.1.1.1][SUCCESS]' % i for i in range(1000)]
log[100] = '[100][2.2.2.2][FAIL]'
log[200] = '[200][2.2.2.2][FAIL]'
log[300] = '[300][2.2.2.2][FAIL]'
d = BadLoginDetector(3,201)
for e in log:
newentry = LogEntry.fromstring(e)
d.process(newentry)
self.assertEqual(d.report(), ['2.2.2.2'])
def testreport_twofails_same_ip(self):
log = ['[%d][1.1.1.1][SUCCESS]' % i for i in range(1000)]
log[100] = '[100][2.2.2.2][FAIL]'
log[200] = '[200][2.2.2.2][FAIL]'
log[300] = '[300][2.2.2.2][FAIL]'
log[400] = '[400][2.2.2.2][FAIL]'
d = BadLoginDetector(3,1000)
for e in log:
newentry = LogEntry.fromstring(e)
d.process(newentry)
self.assertEqual(d.report(), ['2.2.2.2'])
def testreport_lots_of_fails(self):
log = ['[%d][1.1.1.%d][FAIL]' % (i, i//3) for i in range(900)]
d = BadLoginDetector(3,3)
for e in log:
newentry = LogEntry.fromstring(e)
d.process(newentry)
self.assertEqual(len(d.report()), 300)
def testreport_onefail_too_far_apart(self):
log = ['[%d][1.1.1.1][SUCCESS]' % i for i in range(1000)]
log[100] = '[100][2.2.2.2][FAIL]'
log[200] = '[200][2.2.2.2][FAIL]'
log[300] = '[300][2.2.2.2][FAIL]'
d = BadLoginDetector(3,150)
for e in log:
newentry = LogEntry.fromstring(e)
d.process(newentry)
self.assertEqual(d.report(), [])
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
6b509628bae8d7e370d8a30f240ccb933c8a259b | bb5b63774924abe86c2cb0d8a09795fcf1a4d822 | /realtime_chat_app/settings.py | a993b4cb91db8e23f4b5bb0125e7902a99710b73 | []
| no_license | IdenTiclla/realtime_chat_app | 769bf432e993ee79cb93bd54489305db3526f4d5 | d2a5187bb9f257c5e8fefe6735d23e5d0eec64e6 | refs/heads/master | 2023-06-23T17:47:41.766605 | 2021-07-21T21:00:25 | 2021-07-21T21:00:25 | 387,920,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,305 | py | """
Django settings for realtime_chat_app project.
Generated by 'django-admin startproject' using Django 3.2.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-c4b8_#+0@)emex-0l&uq=)z7b91=6bu=y$*!8k^u#k(0%hcfkk'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'chat'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'realtime_chat_app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'realtime_chat_app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
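# Illustrative note (an assumption, not part of the generated settings):
# a websocket-based chat app would typically also configure Django Channels,
# for example:
#
#   ASGI_APPLICATION = 'realtime_chat_app.asgi.application'
#   CHANNEL_LAYERS = {
#       'default': {'BACKEND': 'channels.layers.InMemoryChannelLayer'},
#   }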
| [
"[email protected]"
]
|