Dataset schema (one row per source file; column: type, observed range):
- blob_id: string, 40 chars
- directory_id: string, 40 chars
- path: string, 3 to 616 chars
- content_id: string, 40 chars
- detected_licenses: sequence, 0 to 112 items
- license_type: 2 classes
- repo_name: string, 5 to 115 chars
- snapshot_id: string, 40 chars
- revision_id: string, 40 chars
- branch_name: 777 classes
- visit_date: timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38
- revision_date: timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00
- committer_date: timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06
- github_id: int64, 4.92k to 681M, nullable
- star_events_count: int64, 0 to 209k
- fork_events_count: int64, 0 to 110k
- gha_license_id: 22 classes
- gha_event_created_at: timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable
- gha_created_at: timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable
- gha_language: 149 classes
- src_encoding: 26 classes
- language: 1 class
- is_vendor: bool, 2 classes
- is_generated: bool, 2 classes
- length_bytes: int64, 3 to 10.2M
- extension: 188 classes
- content: string, 3 to 10.2M chars
- authors: sequence, 1 item
- author_id: string, 1 to 132 chars

# File: /Victor/dq2.victor.cms/lib/dq2/common/externalcall.py | repo: mmeoni/DDM | license: none | Python | 2,528 bytes
"""
Module for handling external process calls.
@author: Miguel Branco
@contact: [email protected]
@since: 1.0
@version: $Id: externalcall.py,v 1.1 2008-05-19 13:16:09 mbranco Exp $
"""
import os
import signal
import sys
import time
import tempfile
class ExternalCallException(Exception):
pass
class ExternalCallTimeOutException(ExternalCallException):
pass
def call(cmd, min_secs=1, timeout_secs=30, interval_secs=1, kill_on_timeout=True):
"""
Do external call by spawning new process.
@raise ExternalCallException: In case of error.
@raise ExternalCallTimeOutException: In case of timeout.
@return: Tuple with status, output
"""
cmd = cmd.strip()
try:
output = tempfile.mktemp()
except RuntimeWarning:
pass
except:
raise ExternalCallException("Could not create temporary file.")
startTime = time.time()
try:
childPid = os.fork()
except:
raise ExternalCallException("Could not spawn process to serve '%s'." % cmd)
if childPid == 0:
try:
# child process
os.setpgrp() # group leader
# redirect outputs to file
f = open(output, 'w')
os.dup2(f.fileno(), sys.stdout.fileno())
os.dup2(f.fileno(), sys.stderr.fileno())
# execute ...
args = cmd.split(' ')
os.execvp(args[0], args)
finally:
os._exit(1)
# parent process
time.sleep(min_secs)
exitCode = None
finished = False
while time.time() - startTime < timeout_secs:
try:
            pid, exitCode = os.waitpid(childPid, os.WNOHANG)
if pid == 0: # not finished
time.sleep(interval_secs)
continue
elif pid > 0: # done
finished = True
break
except:
break
try:
if finished: # read output file
f = open(output, 'r')
ll = f.readlines()
f.close()
return exitCode, ''.join(ll)
# timed out
if kill_on_timeout:
os.killpg(childPid, signal.SIGKILL)
time.sleep(1)
# wait for any child process without hanging
try:
r = os.waitpid(-1, os.WNOHANG)
except:
pass
raise ExternalCallTimeOutException("Call to '%s' timed out." % cmd)
finally:
try: # always remove temporary file
os.remove(output)
except:
pass
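# Illustrative usage sketch (added; not part of the original module). The
# command string and timeout values here are arbitrary examples:
if __name__ == '__main__':
    try:
        status, output = call('ls -l /tmp', min_secs=0, timeout_secs=10)
        print('exit status %s, output:' % status)
        print(output)
    except ExternalCallTimeOutException:
        print('command timed out')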
# authors: [email protected]

# File: /Concepts/2 Pointers/253.py | repo: Dinesh94Singh/PythonArchivedSolutions | license: none | Python | 1,018 bytes
"""
Given an array of meeting time intervals consisting of start and end times [[s1,e1],[s2,e2],...] (si < ei),
find the minimum number of conference rooms required.
Example 1:
Input: [[0, 30],[5, 10],[15, 20]]
Output: 2
Example 2:
Input: [[7,10],[2,4]]
Output: 1
NOTE: input types have been changed on April 15, 2019. Please reset to default code definition to get new method
signature.
"""
def min_meeting_rooms_using_2_ptrs(intervals):
start_array = sorted([i[0] for i in intervals])
end_array = sorted([i[1] for i in intervals])
used_rooms = 0
start_ptr, end_ptr = 0, 0
while start_ptr < len(intervals):
if start_array[start_ptr] >= end_array[end_ptr]:
# if the meeting starts after the previous meeting - we can use the same room
used_rooms -= 1
end_ptr += 1
used_rooms += 1
start_ptr += 1
return used_rooms
assert min_meeting_rooms_using_2_ptrs([[7, 10], [2, 4]]) == 1
assert min_meeting_rooms_using_2_ptrs([[0, 30], [5, 10], [15, 20]]) == 2
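# Added, illustrative alternative (not part of the original file): the same
# answer via a min-heap of meeting end times.
import heapq

def min_meeting_rooms_using_heap(intervals):
    intervals.sort(key=lambda i: i[0])  # sort meetings by start time
    end_times = []  # min-heap of end times of meetings currently holding a room
    for start, end in intervals:
        if end_times and end_times[0] <= start:
            heapq.heappop(end_times)  # the earliest-ending meeting is over; reuse its room
        heapq.heappush(end_times, end)
    return len(end_times)

assert min_meeting_rooms_using_heap([[0, 30], [5, 10], [15, 20]]) == 2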
# authors: [email protected]

# File: /search.py | repo: gtback/rfc-elasticsearch | license: MIT | Python | 559 bytes
#!/usr/bin/env python
import sys
import requests
BASE_URL = "http://localhost:9200/"
INDEX = "rfc"
def main():
term = sys.argv[1]
r = requests.get(BASE_URL + INDEX + "/_search/?q=%s" % term)
results = r.json()
res_count = results['hits']['total']
time = results['took'] / 1000.0
print "%s results in %s s" % (res_count, time)
for hit in results['hits']['hits']:
print "%s - %s (%s)" % (hit['_id'].upper(), hit['_source']['title'],
hit['_score'])
if __name__ == '__main__':
main()
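# Example invocation (added; assumes a local Elasticsearch at localhost:9200
# with an "rfc" index, as configured above):
#   python search.py checksum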
# authors: [email protected]

# File: /app/main/MyFlowDomain.py | repo: guohongjie/YuHuiAutoApi | license: none | Python | 5,384 bytes
#!/usr/bin/python
#-*-coding:utf-8 -*-
from app.main import flow
from flask import render_template,request,make_response,jsonify,session
from app.config.api_models import Run_Suite,Test_Domain
from app.config.user_models import DeptName
from app import db
from sqlalchemy import func
@flow.route("/myFlowDomain",methods=["GET"])
def myFlowDomainIndex():
"""
    Workflow configuration home page
    :return:
    """
    # api_project = Project.query.with_entities(Project.project).distinct().all()
    # fetch the test projects and pass them into the page
test_doamin = Test_Domain.query.filter(Test_Domain.statu==1).distinct().all()
test_group = DeptName.query.filter(DeptName.status==1).all()
return render_template("flow/flowManger.html",
test_groups=test_group,test_doamin=test_doamin)
@flow.route("/flowSearch",methods=["GET"])
def flowSearch():
"""
    Search workflows
    :return:
    """
    test_group = request.args.get('test_group') # project group name
    test_domain = request.args.get('test_domain') # test domain name
    if test_group == 'None' and test_domain == 'None':
        # both project group and test domain are empty: return all workflows
datas = Run_Suite.query.all()
elif test_group != 'None' and test_domain == 'None':
datas = Run_Suite.query.filter(Run_Suite.test_group==test_group).all()
elif test_group != 'None' and test_domain != 'None':
datas = Run_Suite.query.filter(Run_Suite.test_group == test_group
).filter(
func.find_in_set(test_domain,Run_Suite.domain)).order_by(
Run_Suite.RunOrderId).all()
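        # Note (added): func.find_in_set renders MySQL's FIND_IN_SET(), which
        # matches test_domain against the comma-separated values stored in the
        # Run_Suite.domain column.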
else:
datas = Run_Suite.query.filter(
func.find_in_set(test_domain,Run_Suite.domain)
).order_by(Run_Suite.RunOrderId).all()
suiteList = []
for singleDatas in datas:
        suiteDatas = {"id":singleDatas.id,
                      "RunOrderId":singleDatas.RunOrderId,
                      "domain":singleDatas.domain,
                      "name":singleDatas.suiteName,
                      "desc":singleDatas.description,
                      "statu": "enabled" if singleDatas.statu else "disabled",
                      "test_group":singleDatas.test_group}
suiteList.append(suiteDatas)
resp = {"status": 200, "datas": suiteList}
msg_resp = make_response(jsonify(resp))
return msg_resp
@flow.route("/flowSingleDatas",methods=["GET"])
def flowSingleDatas():
    """Fetch a single workflow record for the edit form"""
flow_id = request.args.get("flow_id")
datas = Run_Suite.query.filter(Run_Suite.id==flow_id).first()
deptNameSession = session.get("deptName")
isAdmin = session.get("isAdmin")
if datas.test_group != deptNameSession and isAdmin != True:
        resp = {'datas': "Your current department differs from the workflow's department; no permission to modify!", 'code': '400'}
return make_response(jsonify(resp))
msg = {"id":datas.id,
"test_group":datas.test_group,
"name":datas.suiteName,
"domain":datas.domain,
"statu":datas.statu,
"desc":datas.description,
"user":datas.user,
"flow_order":datas.RunOrderId}
resp ={"status":200,"datas":msg}
return make_response(jsonify(resp))
@flow.route("/flowSaveUpdate",methods=["GET"])
def flowSaveUpdate():
"""
保存修改功能
:return:
"""
flow_id = request.args.get("flow_id")
flow_name = request.args.get("flow_name")
flow_domain = request.args.get("flow_domain")
flow_statu = request.args.get("flow_statu")
flow_desc = request.args.get("flow_desc")
user = request.args.get("tester")
flow_order = request.args.get("flow_order")
if not flow_order.isdigit():
        resp = {'datas': "The run order must be a number", 'code': '400'}
return make_response(jsonify(resp))
try:
datas = Run_Suite.query.filter_by(id=flow_id).update(dict(user=user,
suiteName=flow_name,
domain=flow_domain,
statu= 1 if flow_statu=="1" else 0,
RunOrderId=int(flow_order),
description=flow_desc))
db.session.commit()
        resp = {'datas': 'Update succeeded', 'code': '200'}
except Exception as e:
db.session.rollback()
resp = {'datas': str(e), 'code': '400'}
return make_response(jsonify(resp))
@flow.route("/flowDelete",methods=["GET"])
def flowDelete():
flow_id = request.args.get("flow_id")
datas = Run_Suite.query.filter(Run_Suite.id == flow_id).first()
deptNameSession = session.get("deptName")
isAdmin = session.get("isAdmin")
if datas.test_group != deptNameSession and isAdmin != True:
        resp = {'datas': "Your current department differs from the workflow's department; no permission to modify!", 'code': '400'}
return make_response(jsonify(resp))
else:
try:
db.session.delete(datas)
db.session.commit()
            resp = {'datas': 'Delete succeeded', 'code': '200'}
except Exception as e:
db.session.rollback()
resp = {"code": 400, "datas": str(e)}
        return make_response(jsonify(resp))
# authors: [email protected]

# File: /reudom/__init__.py | repo: braveryzhangsan/reudom | license: Apache-2.0 | Python | 1,161 bytes
#!/usr/bin/python
#
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from .running.test_runner import main
from .case import TestCase
from .case import *
from .testdata import ddt, ddt_class
from .skip import skip
import requests
from requests import *
from unittest import TestCase
__author__ = "Barry"
__version__ = "1.2.0.3"
__description__ = "Automated testing framework based on requests and unittest interface."
# authors: [email protected]

# File: /W261/HW3-Questions/reducer_s.py | repo: leiyang-mids/MIDS | license: none | Python | 333 bytes
#!/usr/bin/python
import sys
# increase counter for mapper being called
sys.stderr.write("reporter:counter:HW3_5,Reducer_s_cnt,1\n")
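# Note (added): this is a Hadoop Streaming reducer. It reads the mapper's
# sorted "pair,count" lines from stdin, prints only the first 50 of them,
# and reports progress through the reporter:counter stderr protocol. A
# typical (assumed) invocation passes it to the hadoop-streaming jar via
# "-reducer reducer_s.py".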
n_out = 0
n_top = 50
print 'top %d pairs: ' %n_top
for line in sys.stdin:
# parse mapper output
n_out += 1
if n_out <= n_top:
        print line.strip().replace(',', '\t')
# authors: [email protected]

# File: /froide/publicbody/models.py | repo: infoaed/froide | license: MIT | Python | 17,433 bytes
from datetime import timedelta
from django.contrib.gis.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.sites.models import Site
from django.contrib.sites.managers import CurrentSiteManager
from django.urls import reverse
from django.conf import settings
from django.utils.text import Truncator
from django.utils.safestring import mark_safe
from django.utils.html import escape
from django.utils import timezone
from taggit.managers import TaggableManager
from taggit.models import TagBase, TaggedItemBase
from taggit.utils import edit_string_for_tags
from treebeard.mp_tree import MP_Node, MP_NodeManager
from froide.georegion.models import GeoRegion
from froide.helper.date_utils import (
calculate_workingday_range,
calculate_month_range_de
)
from froide.helper.templatetags.markup import markdown
from froide.helper.csv_utils import export_csv
from froide.helper.api_utils import get_fake_api_context
DEFAULT_LAW = settings.FROIDE_CONFIG.get("default_law", 1)
def get_applicable_law(pb=None, law_type=None):
if pb is not None:
pb_laws = pb.laws.all()
juris_laws = FoiLaw.objects.filter(jurisdiction=pb.jurisdiction)
# Check pb laws and then, if empty, pb juris laws
for qs in (pb_laws, juris_laws):
if law_type is not None:
qs = qs.filter(law_type__contains=law_type)
# Prefer meta laws
qs = qs.order_by('-meta')
if qs:
break
try:
return qs[0]
except IndexError:
pass
try:
return FoiLaw.objects.get(id=DEFAULT_LAW)
except FoiLaw.DoesNotExist:
return None
class JurisdictionManager(models.Manager):
def get_visible(self):
return self.get_queryset()\
.filter(hidden=False).order_by('rank', 'name')
def get_list(self):
return self.get_visible().annotate(
num_publicbodies=models.Count('publicbody')
)
class Jurisdiction(models.Model):
name = models.CharField(_("Name"), max_length=255)
slug = models.SlugField(_("Slug"), max_length=255)
description = models.TextField(_("Description"), blank=True)
hidden = models.BooleanField(_("Hidden"), default=False)
rank = models.SmallIntegerField(default=1)
region = models.ForeignKey(
GeoRegion, null=True, on_delete=models.SET_NULL, blank=True
)
objects = JurisdictionManager()
class Meta:
verbose_name = _("Jurisdiction")
verbose_name_plural = _("Jurisdictions")
ordering = ('rank', 'name',)
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('publicbody-show_jurisdiction',
kwargs={'slug': self.slug})
def get_absolute_domain_url(self):
return "%s%s" % (settings.SITE_URL, self.get_absolute_url())
def get_all_laws(self):
laws = FoiLaw.objects.filter(jurisdiction=self)
meta_ids = laws.filter(meta=True).values_list('combined', flat=True)
meta_laws = FoiLaw.objects.filter(pk__in=meta_ids)
return laws.union(meta_laws)
class FoiLaw(models.Model):
name = models.CharField(_("Name"), max_length=255)
slug = models.SlugField(_("Slug"), max_length=255)
description = models.TextField(_("Description"), blank=True)
long_description = models.TextField(_("Website Text"), blank=True)
created = models.DateField(_("Creation Date"), blank=True, null=True)
updated = models.DateField(_("Updated Date"), blank=True, null=True)
request_note = models.TextField(_("request note"), blank=True)
meta = models.BooleanField(_("Meta Law"), default=False)
law_type = models.CharField(_('law type'), max_length=255, blank=True)
combined = models.ManyToManyField(
'FoiLaw',
verbose_name=_("Combined Laws"), blank=True
)
letter_start = models.TextField(_("Start of Letter"), blank=True)
letter_end = models.TextField(_("End of Letter"), blank=True)
jurisdiction = models.ForeignKey(
Jurisdiction, verbose_name=_('Jurisdiction'),
null=True, on_delete=models.SET_NULL, blank=True)
priority = models.SmallIntegerField(_("Priority"), default=3)
url = models.CharField(_("URL"), max_length=255, blank=True)
max_response_time = models.IntegerField(_("Maximal Response Time"),
null=True, blank=True, default=30)
max_response_time_unit = models.CharField(_("Unit of Response Time"),
blank=True, max_length=32, default='day',
choices=(('day', _('Day(s)')),
('working_day', _('Working Day(s)')),
('month_de', _('Month(s) (DE)')),
))
refusal_reasons = models.TextField(
_("Possible Refusal Reasons, one per line, e.g §X.Y: Privacy Concerns"),
blank=True)
mediator = models.ForeignKey('PublicBody', verbose_name=_("Mediator"),
null=True, blank=True,
default=None, on_delete=models.SET_NULL,
related_name="mediating_laws")
email_only = models.BooleanField(_('E-Mail only'), default=False)
requires_signature = models.BooleanField(_('Requires signature'), default=False)
site = models.ForeignKey(Site, verbose_name=_("Site"),
null=True, on_delete=models.SET_NULL,
default=settings.SITE_ID)
class Meta:
verbose_name = _("Freedom of Information Law")
verbose_name_plural = _("Freedom of Information Laws")
def __str__(self):
return "%s (%s)" % (self.name, self.jurisdiction)
def get_absolute_url(self):
return reverse('publicbody-foilaw-show', kwargs={'slug': self.slug})
def get_absolute_domain_url(self):
return "%s%s" % (settings.SITE_URL, self.get_absolute_url())
@property
def request_note_html(self):
return markdown(self.request_note)
@property
def description_html(self):
return markdown(self.description)
@property
def address_required(self):
return not self.email_only
def get_refusal_reason_choices(self):
not_applicable = [('n/a', _("No law can be applied"))]
if self.meta:
return (not_applicable + [
(l[0], "%s: %s" % (law.name, l[1]))
for law in self.combined.all()
for l in law.get_refusal_reason_choices()[1:]])
else:
return (not_applicable + [
(x, Truncator(x).words(12))
for x in self.refusal_reasons.splitlines()])
def as_data(self, request=None):
from .api_views import FoiLawSerializer
if request is None:
ctx = get_fake_api_context()
else:
ctx = {
'request': request
}
return FoiLawSerializer(self, context=ctx).data
def calculate_due_date(self, date=None, value=None):
if date is None:
date = timezone.now()
if value is None:
value = self.max_response_time
if self.max_response_time_unit == "month_de":
return calculate_month_range_de(date, value)
elif self.max_response_time_unit == "day":
return date + timedelta(days=value)
elif self.max_response_time_unit == "working_day":
return calculate_workingday_range(date, value)
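    # Example (added, illustrative): with max_response_time=30 and
    # max_response_time_unit="day", calculate_due_date() returns the given
    # date plus 30 days.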
class PublicBodyTagManager(models.Manager):
def get_topic_list(self):
return (self.get_queryset().filter(is_topic=True)
.order_by('rank', 'name')
.annotate(num_publicbodies=models.Count('publicbodies'))
)
class PublicBodyTag(TagBase):
is_topic = models.BooleanField(_('as topic'), default=False)
rank = models.SmallIntegerField(_('rank'), default=0)
objects = PublicBodyTagManager()
class Meta:
verbose_name = _("Public Body Tag")
verbose_name_plural = _("Public Body Tags")
class TaggedPublicBody(TaggedItemBase):
tag = models.ForeignKey(PublicBodyTag, on_delete=models.CASCADE,
related_name="publicbodies")
content_object = models.ForeignKey('PublicBody', on_delete=models.CASCADE)
class Meta:
verbose_name = _('Tagged Public Body')
verbose_name_plural = _('Tagged Public Bodies')
class CategoryManager(MP_NodeManager):
def get_category_list(self):
count = models.Count('categorized_publicbodies')
return (self.get_queryset().filter(depth=1, is_topic=True)
.order_by('name')
.annotate(num_publicbodies=count)
)
class Category(TagBase, MP_Node):
is_topic = models.BooleanField(_('as topic'), default=False)
node_order_by = ['name']
objects = CategoryManager()
class Meta:
verbose_name = _("category")
verbose_name_plural = _("categories")
def save(self, *args, **kwargs):
if self.pk is None and kwargs.get('force_insert'):
obj = Category.add_root(
name=self.name,
slug=self.slug,
is_topic=self.is_topic
)
self.pk = obj.pk
else:
TagBase.save(self, *args, **kwargs)
class CategorizedPublicBody(TaggedItemBase):
tag = models.ForeignKey(Category, on_delete=models.CASCADE,
related_name="categorized_publicbodies")
content_object = models.ForeignKey('PublicBody', on_delete=models.CASCADE)
class Meta:
verbose_name = _('Categorized Public Body')
verbose_name_plural = _('Categorized Public Bodies')
class Classification(MP_Node):
name = models.CharField(_("name"), max_length=255)
slug = models.SlugField(_("slug"), max_length=255)
node_order_by = ['name']
class Meta:
verbose_name = _("classification")
verbose_name_plural = _("classifications")
def __str__(self):
return self.name
class PublicBodyManager(CurrentSiteManager):
def get_queryset(self):
return (super(PublicBodyManager, self).get_queryset()
.exclude(email='')
.filter(email__isnull=False, confirmed=True)
)
def get_list(self):
return (
self.get_queryset()
.filter(jurisdiction__hidden=False)
.select_related('jurisdiction')
)
def get_for_search_index(self):
return self.get_queryset()
class PublicBody(models.Model):
name = models.CharField(_("Name"), max_length=255)
other_names = models.TextField(_("Other names"), default="", blank=True)
slug = models.SlugField(_("Slug"), max_length=255)
description = models.TextField(_("Description"), blank=True)
url = models.URLField(_("URL"), null=True, blank=True, max_length=500)
parent = models.ForeignKey('PublicBody', null=True, blank=True,
default=None, on_delete=models.SET_NULL,
related_name="children")
root = models.ForeignKey('PublicBody', null=True, blank=True,
default=None, on_delete=models.SET_NULL,
related_name="descendants")
depth = models.SmallIntegerField(default=0)
classification = models.ForeignKey(Classification, null=True, blank=True,
on_delete=models.SET_NULL)
email = models.EmailField(_("Email"), blank=True, default='')
fax = models.CharField(max_length=50, blank=True)
contact = models.TextField(_("Contact"), blank=True)
address = models.TextField(_("Address"), blank=True)
website_dump = models.TextField(_("Website Dump"), null=True, blank=True)
request_note = models.TextField(_("request note"), blank=True)
file_index = models.CharField(_("file index"), max_length=1024, blank=True)
org_chart = models.CharField(_("organisational chart"), max_length=1024, blank=True)
_created_by = models.ForeignKey(settings.AUTH_USER_MODEL,
verbose_name=_("Created by"),
blank=True, null=True, related_name='public_body_creators',
on_delete=models.SET_NULL)
_updated_by = models.ForeignKey(settings.AUTH_USER_MODEL,
verbose_name=_("Updated by"),
blank=True, null=True, related_name='public_body_updaters',
on_delete=models.SET_NULL)
created_at = models.DateTimeField(_("Created at"), default=timezone.now)
updated_at = models.DateTimeField(_("Updated at"), default=timezone.now)
confirmed = models.BooleanField(_("confirmed"), default=True)
number_of_requests = models.IntegerField(_("Number of requests"),
default=0)
site = models.ForeignKey(Site, verbose_name=_("Site"),
null=True, on_delete=models.SET_NULL, default=settings.SITE_ID)
wikidata_item = models.CharField(max_length=50, blank=True)
jurisdiction = models.ForeignKey(Jurisdiction, verbose_name=_('Jurisdiction'),
blank=True, null=True, on_delete=models.SET_NULL)
geo = models.PointField(null=True, blank=True, geography=True)
regions = models.ManyToManyField(GeoRegion, blank=True)
laws = models.ManyToManyField(FoiLaw,
verbose_name=_("Freedom of Information Laws"))
tags = TaggableManager(through=TaggedPublicBody, blank=True)
categories = TaggableManager(
through=CategorizedPublicBody,
verbose_name=_("categories"),
blank=True
)
non_filtered_objects = models.Manager()
objects = PublicBodyManager()
published = objects
class Meta:
ordering = ('name',)
verbose_name = _("Public Body")
verbose_name_plural = _("Public Bodies")
serializable_fields = ('id', 'name', 'slug', 'request_note_html',
'description', 'url', 'email', 'contact',
'address', 'domain', 'number_of_requests')
def __str__(self):
return self.name
@property
def created_by(self):
return self._created_by
@property
def updated_by(self):
return self._updated_by
@property
def domain(self):
if self.url and self.url.count('/') > 1:
return self.url.split("/")[2]
return None
@property
def all_names(self):
names = [self.name, self.other_names]
if self.jurisdiction:
names.extend([self.jurisdiction.name, self.jurisdiction.slug])
return ' '.join(names)
@property
def request_note_html(self):
return markdown(self.request_note)
@property
def tag_list(self):
return edit_string_for_tags(self.tags.all())
@property
def default_law(self):
# FIXME: Materialize this?
return self.get_applicable_law()
def get_applicable_law(self, law_type=None):
return get_applicable_law(pb=self, law_type=law_type)
def get_absolute_url(self):
return reverse('publicbody-show', kwargs={"slug": self.slug})
def get_absolute_short_url(self):
return reverse('publicbody-publicbody_shortlink', kwargs={
'obj_id': self.pk
})
def get_absolute_domain_url(self):
return "%s%s" % (settings.SITE_URL, self.get_absolute_url())
def get_absolute_domain_short_url(self):
return "%s%s" % (settings.SITE_URL, self.get_absolute_short_url())
def get_mediator(self):
law = self.default_law
if law is None:
return None
return law.mediator
def get_label(self):
return mark_safe(
'%(name)s - <a href="%(url)s" target="_blank" '
'class="info-link">%(detail)s</a>' % {
"name": escape(self.name),
"url": self.get_absolute_url(),
"detail": _("More Info")
}
)
def as_data(self, request=None):
from .api_views import PublicBodyListSerializer
if request is None:
ctx = get_fake_api_context()
else:
ctx = {
'request': request
}
return PublicBodyListSerializer(self, context=ctx).data
@property
def children_count(self):
return len(PublicBody.objects.filter(parent=self))
@classmethod
def export_csv(cls, queryset):
fields = (
"id", "name", "email", "fax", "contact",
"address", "url",
('classification', lambda x: x.classification.name if x.classification else None),
"jurisdiction__slug",
("categories", lambda x: edit_string_for_tags(x.categories.all())),
"other_names", "website_dump", "description",
"request_note", "parent__id",
('regions', lambda obj: ','.join(str(x.id) for x in obj.regions.all()))
)
return export_csv(queryset, fields)
class ProposedPublicBodyManager(CurrentSiteManager):
def get_queryset(self):
return (super(ProposedPublicBodyManager, self).get_queryset()
.filter(confirmed=False)
)
class ProposedPublicBody(PublicBody):
objects = ProposedPublicBodyManager()
class Meta:
proxy = True
ordering = ('-created_at',)
verbose_name = _('Proposed Public Body')
verbose_name_plural = _('Proposed Public Bodies')
def confirm(self):
if self.confirmed:
return None
self.confirmed = True
self.save()
counter = 0
for request in self.foirequest_set.all():
if request.confirmed_public_body():
counter += 1
return counter
# authors: [email protected]

# File: /Django/Django_day1/blog/urls.py | repo: wngus9056/Datascience | license: none | Python | 187 bytes
from django.urls import path
from blog import views
urlpatterns = [
path("", views.index, name="index"),
path("<int:pk>/", views.article_detail, name="article_detail"),
]
# authors: [email protected]

# File: /reviews/admin.py | repo: apple2062/airbnb-Django | license: none | Python | 309 bytes
from django.contrib import admin
from . import models
@admin.register(models.Review)
class ReviewAdmin(admin.ModelAdmin):
"""Review Admin Definition """
list_display = (
"__str__",
"rating_average",
    ) # this way the model's __str__ can be used in list_display
# authors: [email protected]

# File: /Python_codes/p00007/s079875761.py | repo: Aasthaengg/IBMdataset | license: none | Python | 239 bytes
import sys
import math
def main():
n = int(input().rstrip())
r = 1.05
digit = 3
a = 100000
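    # Note (added): this appears to be AOJ problem 0007. Each of the n
    # iterations applies 5% interest and rounds the amount up to the nearest
    # 10**digit = 1000.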
for i in range(n):
a = math.ceil(a*r/10**digit)*10**digit
print(a)
if __name__ == '__main__':
    main()
# authors: [email protected]

# File: /Python基础/day17(函数工具、时间工具、加密工具)/demo/03_hashlib/01_hashlib.py | repo: ChWeiking/PythonTutorial | license: MIT | Python | 445 bytes
'''
Accounts: [email protected] [email protected]
Login password: gebilaowang -> 234khkhwrk998y892h3i4hkhkhwkrhwr8h2k34hk32hr
Payment password: 123321 -> skhksjhoiw329822oi3h4hkjshrkjshkdshfiudshsih
'''
import hashlib
pwd = '123456'
# the MD5 hash object
m = hashlib.md5()
# feed the password into the hash object
m.update(pwd.encode('utf-8'))
# produce the digest as a 32-character hexadecimal string
pwd = m.hexdigest()
print(pwd)
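# A minimal sketch (added) of the SHA counterpart to the MD5 flow above,
# anticipating the self-study note below:
s = hashlib.sha256()
s.update('123456'.encode('utf-8'))
print(s.hexdigest())  # 64 hexadecimal characters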
'''
Self-study: hashing with the sha module
'''
# authors: [email protected]

# File: /system/venv/Scripts/pip3.5-script.py | repo: yintiannong/98kar | license: none | Python | 393 bytes
#!F:\0000\system\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3.5'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip3.5')()
)
# authors: [email protected]

# File: mcl1_input/L38/38-60_MD_NVT_rerun/set_7.py | repo: AnguseZhang/Input_TI | license: none | Python | 740 bytes
import os
dir = '/mnt/scratch/songlin3/run/mcl1/L38/MD_NVT_rerun/ti_one-step/38_60/'
filesdir = dir + 'files/'
temp_prodin = filesdir + 'temp_prod_7.in'
temp_pbs = filesdir + 'temp_7.pbs'
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
for j in lambd:
os.chdir("%6.5f" %(j))
workdir = dir + "%6.5f" %(j) + '/'
#prodin
prodin = workdir + "%6.5f_prod_7.in" %(j)
os.system("cp %s %s" %(temp_prodin, prodin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, prodin))
#PBS
pbs = workdir + "%6.5f_7.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#submit pbs
#os.system("qsub %s" %(pbs))
os.chdir(dir)
# authors: [email protected]

# File: /booking/admin.py | repo: Fabricourt/tika | license: none | Python | 307 bytes
from django.contrib import admin
from .models import About, Lessor, Lessee, Truck, Onhire, Booktruck, Driver
admin.site.register(About)
admin.site.register(Lessor)
admin.site.register(Lessee)
admin.site.register(Truck)
admin.site.register(Driver)
admin.site.register(Onhire)
admin.site.register(Booktruck)
# authors: [email protected]

# File: /6 kyu/Rock, Paper, Scissor, Lizard, Spock Game.py | repo: sieczkah/Codewars_KATA | license: none | Python | 560 bytes
"""https://www.codewars.com/kata/569651a2d6a620b72e000059/train/python"""
options = {
'spock': ['scissor', 'rock'],
'scissor': ['paper', 'lizard'],
'paper': ['rock', 'spock'],
'rock': ['lizard', 'scissor'],
'lizard': ['spock', 'paper']
}
def result(p1, p2):
if p1.lower() not in options.keys() or p2.lower() not in options.keys():
return 'Oh, Unknown Thing'
elif p1.lower() == p2.lower().lower():
return 'Draw!'
else:
return 'Player 1 won!' if p2.lower() in options[p1.lower()] else 'Player 2 won!'
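# Illustrative checks (added; not part of the original kata solution), based
# on the rules encoded in `options` above:
print(result('spock', 'scissor'))  # Player 1 won!
print(result('paper', 'paper'))    # Draw!
print(result('rock', 'unicorn'))   # Oh, Unknown Thing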
# authors: [email protected]

# File: /pyscf/scf/test/test_h2o.py | repo: pulkin/pyscf | license: BSD-2-Clause | Python | 8,752 bytes
#!/usr/bin/env python
#
# Author: Qiming Sun <[email protected]>
#
import unittest
import numpy
import scipy.linalg
import tempfile
from pyscf import lib
from pyscf import gto
from pyscf import scf
from pyscf.scf import dhf
mol = gto.Mole()
mol.build(
verbose = 5,
output = '/dev/null',
atom = [
["O" , (0. , 0. , 0.)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)] ],
basis = {"H": '6-31g',
"O": '6-31g',}
)
molsym = mol.copy()
molsym.symmetry = True
molsym.build(0, 0)
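# Note (added): the tests below run restricted, unrestricted and
# relativistic (Dirac) SCF variants on this fixed water/6-31G system and
# compare the converged total energies against stored reference values.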
class KnowValues(unittest.TestCase):
def test_nr_rhf(self):
rhf = scf.RHF(mol)
rhf.conv_tol = 1e-11
self.assertAlmostEqual(rhf.scf(), -75.98394849812, 9)
def test_nr_rohf(self):
mol = gto.Mole()
mol.build(
verbose = 0,
atom = [
["O" , (0. , 0. , 0.)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)] ],
basis = {"H": '6-31g',
"O": '6-31g',},
charge = 1,
spin = 1,
)
mf = scf.ROHF(mol)
mf.conv_tol = 1e-11
self.assertAlmostEqual(mf.scf(), -75.578396379589748, 9)
def test_nr_uhf(self):
uhf = scf.UHF(mol)
uhf.conv_tol = 1e-11
self.assertAlmostEqual(uhf.scf(), -75.98394849812, 9)
def test_nr_df_rhf(self):
rhf = scf.density_fit(scf.RHF(mol))
rhf.conv_tol = 1e-11
self.assertAlmostEqual(rhf.scf(), -75.983210886950, 9)
def test_nr_df_rohf(self):
mol = gto.Mole()
mol.build(
verbose = 0,
atom = [
["O" , (0. , 0. , 0.)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)] ],
basis = {"H": '6-31g',
"O": '6-31g',},
charge = 1,
spin = 1,
)
mf = scf.density_fit(scf.ROHF(mol))
mf.conv_tol = 1e-11
self.assertAlmostEqual(mf.scf(), -75.5775921401438, 9)
def test_nr_df_uhf(self):
uhf = scf.density_fit(scf.UHF(mol))
uhf.conv_tol = 1e-11
self.assertAlmostEqual(uhf.scf(), -75.983210886950, 9)
def test_nr_rhf_no_mem(self):
rhf = scf.RHF(mol)
rhf.conv_tol = 1e-11
rhf.max_memory = 0
self.assertAlmostEqual(rhf.scf(), -75.98394849812, 9)
def test_nr_uhf_no_mem(self):
uhf = scf.UHF(mol)
uhf.conv_tol = 1e-11
uhf.max_memory = 0
self.assertAlmostEqual(uhf.scf(), -75.98394849812, 9)
def test_nr_rhf_no_direct(self):
rhf = scf.RHF(mol)
rhf.conv_tol = 1e-11
rhf.max_memory = 0
rhf.direct_scf = False
self.assertAlmostEqual(rhf.scf(), -75.98394849812, 9)
def test_nr_uhf_no_direct(self):
uhf = scf.UHF(mol)
uhf.conv_tol = 1e-11
uhf.max_memory = 0
uhf.direct_scf = False
self.assertAlmostEqual(uhf.scf(), -75.98394849812, 9)
def test_r_uhf(self):
uhf = dhf.UHF(mol)
uhf.conv_tol_grad = 1e-5
self.assertAlmostEqual(uhf.scf(), -76.038520463270061, 7)
def test_r_rhf(self):
uhf = dhf.RHF(mol)
uhf.conv_tol_grad = 1e-5
self.assertAlmostEqual(uhf.scf(), -76.038520463270061, 7)
def test_level_shift_uhf(self):
uhf = scf.UHF(mol)
uhf.level_shift = .2
self.assertAlmostEqual(uhf.scf(), -75.98394849812, 9)
def test_energy_nuc(self):
self.assertAlmostEqual(mol.energy_nuc(), 9.18825841775, 10)
def test_nr_rhf_symm(self):
mol1 = mol.copy()
mol1.symmetry = 1
mol1.build()
rhf = scf.hf.RHF(mol1)
rhf.conv_tol = 1e-11
self.assertAlmostEqual(rhf.scf(), -75.98394849812, 9)
def test_nr_rohf_symm(self):
mol = gto.Mole()
mol.build(
verbose = 0,
atom = [
["O" , (0. , 0. , 0.)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)] ],
basis = {"H": '6-31g',
"O": '6-31g',},
charge = 1,
spin = 1,
symmetry = True,
)
mf = scf.RHF(mol)
mf.conv_tol = 1e-11
self.assertAlmostEqual(mf.scf(), -75.578396379589748, 9)
def test_nr_uhf_symm(self):
mol1 = mol.copy()
mol1.symmetry = 1
mol1.build()
uhf = scf.UHF(mol1)
uhf.conv_tol = 1e-11
self.assertAlmostEqual(uhf.scf(), -75.98394849812, 9)
def test_init_guess_minao(self):
dm = scf.hf.init_guess_by_minao(mol)
s = scf.hf.get_ovlp(mol)
occ, mo = scipy.linalg.eigh(dm, s, type=2)
ftmp = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
scf.chkfile.dump_scf(mol, ftmp.name, 0, occ, mo, occ)
self.assertAlmostEqual(numpy.linalg.norm(dm), 3.0334714065913508, 9)
mf = scf.hf.RHF(mol)
dm0 = scf.hf.init_guess_by_chkfile(mol, ftmp.name, project=False)
dm1 = mf.get_init_guess(key='minao')
self.assertTrue(numpy.allclose(dm0, dm1))
mf = scf.DHF(mol)
dm0 = scf.dhf.init_guess_by_chkfile(mol, ftmp.name, project=False)
dm1 = mf.get_init_guess(key='minao')
self.assertTrue(numpy.allclose(dm0, dm1))
def test_init_guess_atom(self):
dm = scf.hf.init_guess_by_atom(mol)
s = scf.hf.get_ovlp(mol)
occ, mo = scipy.linalg.eigh(dm, s, type=2)
ftmp = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
scf.chkfile.dump_scf(mol, ftmp.name, 0, occ, mo, occ)
self.assertAlmostEqual(numpy.linalg.norm(dm), 3.064429619915702, 8)
mf = scf.hf.RHF(mol)
dm0 = scf.rhf.init_guess_by_chkfile(mol, ftmp.name, project=False)
dm1 = mf.init_guess_by_atom(mol)
self.assertTrue(numpy.allclose(dm0, dm1))
mf = scf.DHF(mol)
dm0 = scf.dhf.init_guess_by_chkfile(mol, ftmp.name, project=False)
dm1 = mf.init_guess_by_atom(mol)
self.assertTrue(numpy.allclose(dm0, dm1))
pmol = gto.M(atom=mol.atom, basis='ccpvdz')
pmol.cart = True
dm = scf.hf.init_guess_by_atom(pmol)
self.assertAlmostEqual(numpy.linalg.norm(dm), 2.923422868807739, 8)
def test_init_guess_1e(self):
dm = scf.hf.init_guess_by_1e(mol)
s = scf.hf.get_ovlp(mol)
occ, mo = scipy.linalg.eigh(dm, s, type=2)
ftmp = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
scf.chkfile.dump_scf(mol, ftmp.name, 0, occ, mo, occ)
self.assertAlmostEqual(numpy.linalg.norm(dm), 5.3700828975288122, 9)
mf = scf.hf.RHF(mol)
dm0 = scf.rhf.init_guess_by_chkfile(mol, ftmp.name, project=False)
dm1 = mf.init_guess_by_1e(mol)
self.assertTrue(numpy.allclose(dm0, dm1))
mf = scf.rohf.ROHF(mol)
dm1 = mf.init_guess_by_1e(mol)
self.assertAlmostEqual(numpy.linalg.norm(dm1),
5.3700828975288122/numpy.sqrt(2), 9)
mf = scf.rohf.ROHF(molsym)
dm1 = mf.init_guess_by_1e(mol)
self.assertAlmostEqual(numpy.linalg.norm(dm1),
5.3700828975288122/numpy.sqrt(2), 9)
mf = scf.DHF(mol)
dm1 = mf.init_guess_by_1e(mol)
self.assertAlmostEqual(numpy.linalg.norm(dm1), 7.5925205205065422, 9)
def test_init_guess_chkfile(self):
ftmp = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
def save(HFclass):
mf0 = HFclass(mol)
mf0.chkfile = ftmp.name
h = mf0.get_hcore(mol)
s = mf0.get_ovlp(mol)
f = mf0.get_fock(h, s, numpy.zeros_like(h), numpy.zeros_like(h))
mo_energy, mo_coeff = mf0.eig(f, s)
mo_occ = mf0.get_occ(mo_energy, mo_coeff)
e_tot = 0
mf0.dump_chk(locals())
def check(HFclass, ref):
mol1 = mol.copy()
mol1.basis = 'cc-pvdz'
mol1.build()
mf1 = HFclass(mol1)
mf1.chkfile = ftmp.name
dm1 = mf1.init_guess_by_chkfile()
self.assertAlmostEqual(numpy.linalg.norm(dm1), ref, 9)
save(scf.hf.RHF)
check(scf.hf.RHF, 5.2644790347333048)
check(scf.rohf.ROHF, 3.7225488248743273)
check(scf.uhf.UHF, 3.7225488248743273)
check(scf.dhf.UHF, 3.7225488248743273)
save(scf.uhf.UHF)
check(scf.hf.RHF, 5.2644790347333048)
check(scf.rohf.ROHF, 3.7225488248743273)
check(scf.uhf.UHF, 3.7225488248743273)
check(scf.dhf.UHF, 3.7225488248743273)
save(scf.dhf.UHF)
check(scf.dhf.UHF, 7.3540281989311271)
if __name__ == "__main__":
print("Full Tests for H2O")
unittest.main()
# authors: [email protected]

# File: /mlrefined_libraries/calculus_library/secant_to_tangent_3d.py | repo: bdieu178/mlrefined | license: none | Python | 10,689 bytes
# import custom JS animator
from mlrefined_libraries.JSAnimation_slider_only import IPython_display_slider_only
# import standard plotting and animation
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from IPython.display import clear_output
import time
from matplotlib import gridspec
from mpl_toolkits.mplot3d import proj3d
from mpl_toolkits.mplot3d.proj3d import proj_transform
# import autograd functionality
from autograd import grad as compute_grad # The only autograd function you may ever need
import autograd.numpy as np
import math
import copy
# function for producing fixed image of tangency along each input axis, along with full tangent hyperplane (first order Taylor Series approximation)
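# Note (added): the tangent hyperplane drawn below is the first-order Taylor
# approximation h(w) = g(w0) + grad_g(w0)^T (w - w0), evaluated at the anchor
# point w0 = (0, 0).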
def draw_it(func,**kwargs):
view = [33,50]
if 'view' in kwargs:
view = kwargs['view']
# compute gradient, points
anchor = [0,0]
anchor = np.array([float(anchor[0]),float(anchor[1])])
anchor.shape = (2,1)
g_anchor = func(anchor)
# file tracer
tracer = np.asarray([0,10**-5])
tracer = np.array([float(tracer[0]),float(tracer[1])])
tracer.shape = (2,1)
g_tracer = func(tracer)
# construct figure
fig = plt.figure(figsize = (5,2.5))
artist = fig
# remove whitespace from figure
fig.subplots_adjust(left=0, right=1, bottom=0, top=1) # remove whitespace
fig.subplots_adjust(wspace=0.01,hspace=0.01)
# create subplot with 3 panels, plot input function in center plot
gs = gridspec.GridSpec(1, 3, width_ratios=[1,1,1])
ax1 = plt.subplot(gs[0],projection='3d');
ax2 = plt.subplot(gs[1],projection='3d');
ax3 = plt.subplot(gs[2],projection='3d');
### first panel - partial with respect to w_1 ###
# scatter anchor point
ax1.scatter(anchor[0],anchor[1],g_anchor,s = 50,c = 'lime',edgecolor = 'k',linewidth = 1)
# plot hyperplane connecting the anchor to tracer
secant(func,anchor,tracer,ax1)
# plot function
plot_func(func,view,ax1)
### second panel - partial with respect to w_2 ###
tracer = np.flipud(tracer)
ax2.scatter(anchor[0],anchor[1],g_anchor,s = 50,c = 'lime',edgecolor = 'k',linewidth = 1)
# plot hyperplane connecting the anchor to tracer
secant(func,anchor,tracer,ax2)
# plot function
plot_func(func,view,ax2)
### third panel - plot full tangent hyperplane at anchor ###
ax3.scatter(anchor[0],anchor[1],g_anchor,s = 50,c = 'lime',edgecolor = 'k',linewidth = 1)
# plot hyperplane connecting the anchor to tracer
tangent(func,anchor,ax3)
# plot function
plot_func(func,view,ax3)
# main function for plotting individual axes tangent approximations
def animate_it(func,**kwargs):
view = [33,50]
if 'view' in kwargs:
view = kwargs['view']
num_frames = 10
if 'num_frames' in kwargs:
num_frames = kwargs['num_frames']
# compute gradient, points
anchor = [0,0]
anchor = np.array([float(anchor[0]),float(anchor[1])])
anchor.shape = (2,1)
g_anchor = func(anchor)
# compute tracer range
z = np.zeros((num_frames,1))
tracer_range = np.linspace(-2.5,2.5,num_frames)
ind = np.argmin(abs(tracer_range))
tracer_range[ind] = 10**-5
tracer_range.shape = (num_frames,1)
tracer_range = np.concatenate((tracer_range,z),axis=1)
# construct figure
fig = plt.figure(figsize = (6,3))
artist = fig
# remove whitespace from figure
fig.subplots_adjust(left=0, right=1, bottom=0, top=1) # remove whitespace
fig.subplots_adjust(wspace=0.01,hspace=0.01)
# create subplot with 3 panels, plot input function in center plot
gs = gridspec.GridSpec(1, 2, width_ratios=[1,1])
ax1 = plt.subplot(gs[0],projection='3d');
ax2 = plt.subplot(gs[1],projection='3d');
# start animation
def animate(k):
# clear the panels
ax1.cla()
ax2.cla()
# print rendering update
if np.mod(k+1,25) == 0:
print ('rendering animation frame ' + str(k+1) + ' of ' + str(num_frames))
if k == num_frames - 1:
print ('animation rendering complete!')
time.sleep(1.5)
clear_output()
if k > 0:
# pull current tracer
tracer = tracer_range[k-1]
tracer = np.array([float(tracer[0]),float(tracer[1])])
tracer.shape = (2,1)
g_tracer = func(tracer)
### draw 3d version ###
for ax in [ax1,ax2]:
# plot function
plot_func(func,view,ax)
if k > 0:
# scatter anchor point
ax.scatter(anchor[0],anchor[1],g_anchor,s = 50,c = 'lime',edgecolor = 'k',linewidth = 1)
# plot hyperplane connecting the anchor to tracer
secant(func,anchor,tracer,ax)
# reset tracer
tracer = np.flipud(tracer)
return artist,
anim = animation.FuncAnimation(fig, animate,frames=num_frames+1, interval=num_frames+1, blit=True)
return(anim)
# plot secant hyperplane, as well as guides for both anchor and tracer point
def secant(func,anchor,tracer,ax):
# evaluate function at anchor and tracer
g_anchor = func(anchor)
g_tracer = func(tracer)
anchor_orig = copy.deepcopy(anchor)
tracer_orig = copy.deepcopy(tracer)
# determine non-zero component of tracer, compute slope of secant line
anchor = anchor.flatten()
tracer = tracer.flatten()
ind = np.argwhere(tracer != 0)
anchor = anchor[ind]
tracer = tracer[ind]
# plot secant plane
color = 'lime'
if abs(anchor - tracer) > 10**-4:
# scatter tracer point
ax.scatter(tracer_orig[0],tracer_orig[1],g_tracer,s = 50,c = 'b',edgecolor = 'k',linewidth = 1)
# change color to red
color = 'r'
# plot visual guide for tracer
w = np.linspace(0,g_tracer,100)
o = np.ones(100)
ax.plot(o*tracer_orig[0],o*tracer_orig[1],w,linewidth = 1.5,alpha = 1,color = 'k',linestyle = '--')
w = np.linspace(0,g_anchor,100)
o = np.ones(100)
ax.plot(o*anchor_orig[0],o*anchor_orig[1],w,linewidth = 1.5,alpha = 1,color = 'k',linestyle = '--')
# compute slope of secant plane
slope = (g_anchor - g_tracer)/float(anchor - tracer)
# create function for hyperplane connecting anchor to tracer
w_tan = np.linspace(-2.5,2.5,200)
w1tan_vals, w2tan_vals = np.meshgrid(w_tan,w_tan)
w1tan_vals.shape = (len(w_tan)**2,1)
w2tan_vals.shape = (len(w_tan)**2,1)
wtan_vals = np.concatenate((w1tan_vals,w2tan_vals),axis=1).T
# create tangent hyperplane formula, evaluate
h = lambda w: g_anchor + slope*(w[ind] - anchor)
h_vals = h(wtan_vals)
# reshape everything and prep for plotting
w1tan_vals.shape = (len(w_tan),len(w_tan))
w2tan_vals.shape = (len(w_tan),len(w_tan))
h_vals.shape = (len(w_tan),len(w_tan))
# plot hyperplane and guides based on proximity of tracer to anchor
ax.plot_surface(w1tan_vals, w2tan_vals, h_vals, alpha = 0.2,color = color,zorder = 3,rstride=50, cstride=50,linewidth=0.5,edgecolor = 'k')
# form tangent hyperplane
def tangent(func,anchor,ax):
# compute gradient
grad = compute_grad(func)
grad_val = grad(anchor)
grad_val.shape = (2,1)
g_val = func(anchor)
# create input for tangent hyperplane
w_tan = np.linspace(-2.5,2.5,200)
w1tan_vals, w2tan_vals = np.meshgrid(w_tan,w_tan)
w1tan_vals.shape = (len(w_tan)**2,1)
w2tan_vals.shape = (len(w_tan)**2,1)
wtan_vals = np.concatenate((w1tan_vals,w2tan_vals),axis=1).T
# create tangent hyperplane formula, evaluate
h = lambda weh: g_val + (weh[0]-anchor[0])*grad_val[0] + (weh[1]-anchor[1])*grad_val[1]
h_vals = h(wtan_vals + anchor)
# vals for tangent
w1tan_vals += anchor[0]
w2tan_vals += anchor[1]
w1tan_vals.shape = (len(w_tan),len(w_tan))
w2tan_vals.shape = (len(w_tan),len(w_tan))
h_vals.shape = (len(w_tan),len(w_tan))
### plot tangent plane ###
ax.plot_surface(w1tan_vals, w2tan_vals, h_vals, alpha = 0.4,color = 'lime',zorder = 1,rstride=50, cstride=50,linewidth=0.5,edgecolor = 'k')
# plot the input function and clean up panel
def plot_func(func,view,ax):
# define input space
w_func = np.linspace(-2.5,2.5,200)
w1_vals, w2_vals = np.meshgrid(w_func,w_func)
w1_vals.shape = (len(w_func)**2,1)
w2_vals.shape = (len(w_func)**2,1)
w_vals = np.concatenate((w1_vals,w2_vals),axis=1).T
g_vals = func(w_vals)
w1_vals.shape = (len(w_func),len(w_func))
w2_vals.shape = (len(w_func),len(w_func))
g_vals.shape = (len(w_func),len(w_func))
### plot function ###
ax.plot_surface(w1_vals, w2_vals, g_vals, alpha = 0.1,color = 'w',rstride=25, cstride=25,linewidth=0.75,edgecolor = 'k',zorder = 2)
# clean up the plot while you're at it
cleanup(g_vals,view,ax)
# cleanup an input panel
def cleanup(g_vals,view,ax):
### clean up plot ###
# plot x and y axes, and clean up
ax.xaxis.pane.fill = False
ax.yaxis.pane.fill = False
ax.zaxis.pane.fill = False
ax.xaxis.pane.set_edgecolor('white')
ax.yaxis.pane.set_edgecolor('white')
ax.zaxis.pane.set_edgecolor('white')
### plot z=0 plane ###
w_zplane = np.linspace(-3,3,200)
w1_zplane_vals, w2_zplane_vals = np.meshgrid(w_zplane,w_zplane)
ax.plot_surface(w1_zplane_vals, w2_zplane_vals, np.zeros(np.shape(w1_zplane_vals)), alpha = 0.1,color = 'w',zorder = 1,rstride=25, cstride=25,linewidth=0.3,edgecolor = 'k')
# bolden axis on z=0 plane
ax.plot(w_zplane,w_zplane*0,w_zplane*0,color = 'k',linewidth = 1.5)
ax.plot(w_zplane*0,w_zplane,w_zplane*0,color = 'k',linewidth = 1.5)
# remove axes lines and tickmarks
ax.w_zaxis.line.set_lw(0.)
ax.set_zticks([])
ax.w_xaxis.line.set_lw(0.)
ax.set_xticks([])
ax.w_yaxis.line.set_lw(0.)
ax.set_yticks([])
# set viewing angle
ax.view_init(view[0],view[1])
# set vewing limits
y = 3
ax.set_xlim([-y,y])
ax.set_ylim([-y,y])
zmin = min(np.min(g_vals),-0.5)
zmax = max(np.max(g_vals),+0.5)
ax.set_zlim([zmin,zmax])
# label plot
fontsize = 12
ax.set_xlabel(r'$w_1$',fontsize = fontsize,labelpad = -20)
ax.set_ylabel(r'$w_2$',fontsize = fontsize,rotation = 0,labelpad=-20)
# authors: [email protected]

# File: src/str_upper_double_byte_char.py | repo: simon-ritchie/python-novice-book | license: none | Python | 43 bytes
txt = 'Apple'
print(txt.upper())
# authors: [email protected]

# File: src/crcm5/analyze_rpn/plot_seasonal_means_from_daily_files.py | repo: guziy/RPN | license: none | Python | 3,284 bytes
from datetime import datetime
from pathlib import Path
from matplotlib import cm
from matplotlib.colors import BoundaryNorm
from rpn.domains.rotated_lat_lon import RotatedLatLon
from application_properties import main_decorator
from mpl_toolkits.basemap import cm as cm_basemap, Basemap
__author__ = 'huziy'
from rpn.rpn import RPN
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import os
"""
Read data from an RPN file, calculate daily mean fields and plot them
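(Added note: the script averages every time record of the variable into one
mean field, builds a rotated-pole map projection from the RPN grid metadata,
and writes one PNG per input file.)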
"""
@main_decorator
def main():
path = "/RESCUE/skynet3_rech1/huziy/CNRCWP/Calgary_flood/Global_NA_v1/Samples/Global_NA_v1_201306/pm2013010100_00017280p"
varname = "PR"
plot_units = "mm/day"
mult_coeff = 1000 * 24 * 3600
add_offset = 0
img_folder = "/RESCUE/skynet3_rech1/huziy/CNRCWP/Calgary_flood/glob_sim/{}/monthly/{}".format(varname, os.path.basename(path))
img_folder = Path(img_folder)
if not img_folder.is_dir():
img_folder.mkdir(parents=True)
r = RPN(path=path)
pr = r.get_all_time_records_for_name(varname=varname)
lons2d, lats2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
rll = RotatedLatLon(**r.get_proj_parameters_for_the_last_read_rec())
bmp = rll.get_basemap_object_for_lons_lats(lons2d=lons2d,
lats2d=lats2d,
resolution="c", no_rot=True)
# bmp = Basemap(projection="robin", lon_0=180)
xx, yy = bmp(lons2d, lats2d)
dates = list(sorted(pr.keys()))
data = np.array([pr[d] for d in dates])
p = pd.Panel(data=data, items=dates, major_axis=range(data.shape[1]), minor_axis=range(data.shape[2]))
# p_daily = p.groupby(lambda d: d.day, axis="items").mean()
p_daily = p.apply(np.mean, axis="items")
print(p_daily.head())
lons2d[lons2d > 180] -= 360
bmap_params = bmp.projparams
bmap_params.update({
'llcrnrlon': lons2d[0, 0], 'urcrnrlon': lons2d[-1, -1], 'llcrnrlat': lats2d[0, 0], 'urcrnrlat': lats2d[-1, -1]
})
rpole_crs = ccrs.RotatedPole(pole_longitude=bmap_params["lon_0"] + 180,
pole_latitude=bmap_params["o_lat_p"])
clevs = [0, 0.01, 0.1, 1, 1.5, 2, 5, 10, 20, 40, 60, 80]
norm = BoundaryNorm(clevs, ncolors=len(clevs) - 1)
cmap = cm.get_cmap(cm_basemap.s3pcpn, len(clevs) - 1)
field = p_daily.values * mult_coeff + add_offset
fig = plt.figure()
plt.title("{}, {}/{}/{}, {}".format(varname, 1, dates[0].month, dates[0].year, plot_units))
ax = plt.axes(projection=rpole_crs)
ax.coastlines(resolution='110m')
ax.gridlines()
ax.gridlines()
# cs = bmp.contourf(xx, yy, field, clevs, norm=norm, extend="max", cmap=cmap)
cs = ax.pcolormesh(lons2d[:-1, :-1], lats2d[:-1, :-1], field[:-1, :-1], norm=norm, cmap=cmap, transform=rpole_crs)
plt.colorbar(cs, ticks=clevs, extend="max", ax=ax)
img_file = img_folder.joinpath("{:02d}-{:02d}-{}.png".format(1, dates[0].month, dates[0].year))
# bmp.drawcoastlines()
# bmp.drawstates()
# bmp.drawcounties()
# bmp.drawcountries()
plt.savefig(img_file.open("wb"))
plt.close(fig)
print(pr[dates[0]].mean())
if __name__ == '__main__':
main()
# authors: [email protected]

# File: sdBs/AllRun/sdssj_115035.62+253205.1/sdB_sdssj_115035.62+253205.1_lc.py | repo: tboudreaux/SummerSTScICode | license: none | Python | 370 bytes
from gPhoton.gAperture import gAperture
def main():
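    # Note (added): extracts a GALEX NUV light curve at
    # (RA, Dec) = (177.648417, 25.53475) deg in 30 s bins, using a
    # 0.00556 deg (about 20 arcsec) aperture with a background annulus,
    # and writes the result to the CSV path below.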
gAperture(band="NUV", skypos=[177.648417,25.53475], stepsz=30., csvfile="/data2/fleming/GPHOTON_OUTPU/LIGHTCURVES/sdBs/sdB_sdssj_115035.62+253205.1/sdB_sdssj_115035.62+253205.1_lc.csv", maxgap=1000., overwrite=True, radius=0.00555556, annulus=[0.005972227,0.0103888972], verbose=3)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
5b788699d45a5655649f9b4f4c5222a5daa8ec11 | e53cfac2eadeaf9727ec055b4cd524320cf8ded5 | /26.04/Вариант 19 Поляков/8-1942.py | 397399f64c005a1bdde980e0ae6acdaec09aeb18 | [] | no_license | NevssZeppeli/my_ege_solutions | 55322d71dcc9980e2cf894ea2b88689dca807943 | 7a25c93b8a58b03d0450627f1217972fbf7d04f6 | refs/heads/master | 2023-06-17T19:58:28.388391 | 2021-07-03T16:03:36 | 2021-07-03T16:03:36 | 380,223,538 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | с = 0
for x in 'НОДА':
for y in 'НОДА':
if x == y: continue
for w in 'НОДА':
if w in x + y: continue
for z in 'НОДА':
if z in x + y + w: continue
word = x+y+z+w
if ('ОА' not in word) and ('АО' not in word) and ('НД' not in word) and ('ДН' not in word):
                        с += 1
                        print(word)
print(с)
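# Cross-check sketch using itertools.permutations (an addition, not part
# of the original solution): same alphabet, no repeated letters, same
# forbidden adjacent pairs; prints the count and the words.
from itertools import permutations

bad_pairs = ('ОА', 'АО', 'НД', 'ДН')
words = []
for p in permutations('НОДА'):
    word = ''.join(p)
    if not any(pair in word for pair in bad_pairs):
        words.append(word)
print(len(words), words)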
| [
"[email protected]"
] | |
2fa66bbef81d04439a27bab63b84bd6d9471f1b1 | f35bb12066639698a94847cba4b4628aede1da70 | /contests/python/atcoder_beginners_selection/01_ABC086A/main.py | 7d081a94817ae4b9d8a1a2b4c664fcea284635be | [] | no_license | fly1tkg/atcoder-python-note | 7e74382a8867b07bb7a926988ac854a3b84e020b | 6051b771c0a0399ce8caf1e24256a9909101b0e7 | refs/heads/main | 2023-08-26T23:52:14.766576 | 2021-10-30T11:58:38 | 2021-10-30T11:58:38 | 363,686,005 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 110 | py | a, b = map(int, input().split())
result = a * b
if result % 2 == 0:
print("Even")
else:
print("Odd") | [
"[email protected]"
] | |
effba01bf3546bb14704b8e9e1a0030ce9ce98ac | 37d8802ecca37cc003053c2175f945a501822c82 | /11-拓扑排序/0210-课程表 II .py | 74c246f32580ae2fcd91dba5d7eed54baa543dbb | [
"Apache-2.0"
] | permissive | Sytx74/LeetCode-Solution-Python | cc0f51e31a58d605fe65b88583eedfcfd7461658 | b484ae4c4e9f9186232e31f2de11720aebb42968 | refs/heads/master | 2020-07-04T18:17:24.781640 | 2019-07-30T03:34:19 | 2019-07-30T03:34:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,161 | py | # 210. 课程表 II
# You have a total of n courses to take, labelled from 0 to n-1.
# Some courses have prerequisites. For example, to take course 0 you
# must first finish course 1, which is expressed as the pair: [0,1]
# Given the total number of courses and their prerequisites, return an
# ordering of the courses that lets you finish them all.
# There may be several valid orderings; returning any one of them is
# fine. If it is impossible to finish all courses, return an empty array.
class Solution(object):
def findOrder(self, numCourses, prerequisites):
"""
        :type numCourses: int - total number of courses
        :type prerequisites: List[List[int]] - prerequisite pairs between courses
        :rtype: List[int]
"""
        # number of prerequisite pairs
clen = len(prerequisites)
if clen == 0:
            # no prerequisites, so the courses can be taken in any order
return [i for i in range(numCourses)]
        # in-degree array, all zeros to start with
        in_degrees = [0 for _ in range(numCourses)]
        # adjacency list
adj = [set() for _ in range(numCourses)]
        # to take course 0 you must first finish course 1, given as the
        # pair [0,1], i.e. the edge 1 -> 0; be careful not to reverse it
for second, first in prerequisites:
in_degrees[second] += 1
adj[first].add(second)
# print("in_degrees", in_degrees)
        # first pass: enqueue every node whose in-degree is 0
res = []
queue = []
for i in range(numCourses):
if in_degrees[i] == 0:
queue.append(i)
while queue:
top = queue.pop(0)
res.append(top)
for successor in adj[top]:
in_degrees[successor] -= 1
if in_degrees[successor] == 0:
queue.append(successor)
if len(res) != numCourses:
return []
return res
if __name__ == '__main__':
numCourses = 4
prerequisites = [[1, 0], [2, 0], [3, 1], [3, 2]]
solution = Solution()
result = solution.findOrder(numCourses, prerequisites)
print(result)
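
# A minimal variant of the same Kahn's-algorithm idea (an illustrative
# addition, not part of the original solution): collections.deque makes
# popping from the front O(1) instead of the O(n) list.pop(0) used above.
from collections import deque


def find_order(num_courses, prerequisites):
    in_degrees = [0] * num_courses
    adj = [set() for _ in range(num_courses)]
    for second, first in prerequisites:
        in_degrees[second] += 1
        adj[first].add(second)
    queue = deque(i for i in range(num_courses) if in_degrees[i] == 0)
    order = []
    while queue:
        node = queue.popleft()
        order.append(node)
        for successor in adj[node]:
            in_degrees[successor] -= 1
            if in_degrees[successor] == 0:
                queue.append(successor)
    # a cycle leaves some courses unprocessed -> no valid ordering
    return order if len(order) == num_courses else []


if __name__ == '__main__':
    print(find_order(4, [[1, 0], [2, 0], [3, 1], [3, 2]]))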
| [
"[email protected]"
] | |
fcb705deeef62d671fac51dff40c11b3bac40179 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03729/s949028843.py | 45c50409e0f846d9c29adc30f5988d605b546202 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | Tmp = []
Tmp = input().rstrip().split(' ')
S1 = Tmp[0]
S2 = Tmp[1]
S3 = Tmp[2]
if S1[-1]==S2[0] and S2[-1]==S3[0]:
print('YES')
else:
print('NO')
| [
"[email protected]"
] | |
b2a55e6803a8cb82449b0f4480ce73159caf442d | 3a570384a3fa9c4c7979d33b182556e1c637e9eb | /anwmisc/anwp/sims/missile1_h.py | 520ed4c58d7fbb6a3dc46bfb12ad87458d033f4b | [] | no_license | colshag/ANW | 56a028af5042db92b5ead641dc542fcb4533344e | 46948d8d18a0639185dd4ffcffde126914991553 | refs/heads/master | 2020-03-27T00:22:49.409109 | 2018-10-27T06:37:04 | 2018-10-27T06:37:04 | 145,618,125 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | # ---------------------------------------------------------------------------
# Armada Net Wars (ANW)
# missile1_h.py
# Written by Chris Lewis
# ---------------------------------------------------------------------------
# Represents a missile sim in the simulator
# ---------------------------------------------------------------------------
from OpenGL import GL
centerX = 0
centerY = 0
numFrames = 1
h=8
w=4
points = ( (-h,-w), (h,-w), (h,w), (-h,w) )
primitives = [ (GL.GL_QUADS, (0,1,2,3)) ]
| [
"[email protected]"
] | |
c72220bcf74725873eb70e15d44322f5a0bf2f6d | 0ffb18f4d58961ca675d8294eb2154f69061989f | /auto_process_ngs/auto_processor.py | 42f47a8685a1fde7de0137cf03dc62f7d54b3aca | [] | no_license | nandr0id/auto_process_ngs | a794e904e6d24b0e0403941b44c884374f95850e | 9b09f20b344d0ee87227e8771a479aa7c04f1837 | refs/heads/master | 2020-06-26T03:23:53.225029 | 2019-06-12T12:11:32 | 2019-06-12T12:11:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 38,116 | py | #!/usr/bin/env python
#
# auto_processor.py: automated processing of Illumina sequence data
# Copyright (C) University of Manchester 2013-2019 Peter Briggs
#
#########################################################################
#
# auto_processor.py
#
#########################################################################
#######################################################################
# Imports
#######################################################################
import sys
import os
import subprocess
import logging
import shutil
import uuid
import time
import ast
import gzip
import urllib2
import bcftbx.IlluminaData as IlluminaData
import bcftbx.TabFile as TabFile
import bcftbx.utils as bcf_utils
import bcftbx.htmlpagewriter as htmlpagewriter
from bcftbx.JobRunner import fetch_runner
from bcftbx.FASTQFile import FastqIterator
import config
import commands
import applications
import analysis
import metadata
import fileops
import utils
import simple_scheduler
import bcl2fastq_utils
import samplesheet_utils
from .settings import Settings
from .exceptions import MissingParameterFileException
from auto_process_ngs import get_version
#######################################################################
# Decorators
#######################################################################
def add_command(name,f):
"""
Add a method to a class
Implements an '@add_command' decorator which can be
used to add a function to a class as a new method
(aka 'command').
For example:
>>> def hello(cls):
... print "Hello %s" % cls.person
...
    >>> @add_command("greeting",hello)
... class Example:
... def __init__(self):
... self.person = "World"
...
>>> Example().greeting()
Running 'greeting' command
Hello World
'greeting': finished
The function must accept a class instance as the
first argument.
"""
def wrapped_func(*args,**kws):
# Wraps execution of the supplied
# function to trap exceptions and
# add additional commentary
print "[%s] Running '%s' command" % (timestamp(),name)
try:
ret = f(*args,**kws)
except Exception as ex:
logging.fatal("%s: %s" % (name,ex))
ret = 1
else:
print "[%s] %s: finished" % (timestamp(),name)
return ret
def timestamp():
# Return the current time
return time.strftime("%Y-%m-%d %H:%M:%S")
def wrapper(cls):
# Adds the supplied function to
# to the class
setattr(cls,name,wrapped_func)
return cls
return wrapper
#######################################################################
# Classes
#######################################################################
@add_command("setup",commands.setup)
@add_command("make_fastqs",commands.make_fastqs)
@add_command("analyse_barcodes",commands.analyse_barcodes)
@add_command("merge_fastq_dirs",commands.merge_fastq_dirs)
@add_command("setup_analysis_dirs",commands.setup_analysis_dirs)
@add_command("run_qc",commands.run_qc)
@add_command("publish_qc",commands.publish_qc_cmd.publish_qc)
@add_command("archive",commands.archive_cmd.archive)
@add_command("report",commands.report_cmd.report)
@add_command("update_fastq_stats",commands.update_fastq_stats)
@add_command("import_project",commands.import_project)
@add_command("clone",commands.clone)
class AutoProcess(object):
"""
Class implementing an automatic fastq generation and QC
processing procedure for Illumina sequencing data
"""
def __init__(self,analysis_dir=None,settings=None,
allow_save_params=True):
"""
Create a new AutoProcess instance
Arguments:
analysis_dir (str): name/path for existing analysis
directory
settings (Settings): optional, if supplied then should
be a Settings instance; otherwise use a default
instance populated from the installation-specific
'settings.ini' file
allow_save_params (bool): if True then allow updates
to parameters to be saved back to the parameter file
(this is the default)
"""
# Initialise
self._master_log_dir = "logs"
self._log_dir = self._master_log_dir
# Load configuration settings
if settings is None:
settings = Settings()
self.settings = settings
# Create empty parameter and metadata set
self.params = metadata.AnalysisDirParameters()
self.metadata = metadata.AnalysisDirMetadata()
# Set flags to indicate whether it's okay to save parameters
self._save_params = False
self._save_metadata = False
# Set where the analysis directory actually is
self.analysis_dir = analysis_dir
if self.analysis_dir is not None:
# Load parameters
self.analysis_dir = os.path.abspath(self.analysis_dir)
try:
self.load_parameters(allow_save=allow_save_params)
except MissingParameterFileException, ex:
# No parameter file
logging.warning("Failed to load parameters: %s (ignored)" % ex)
logging.warning("Perhaps this is not an auto_process project?")
# Attempt to detect existing data directory
self.params['unaligned_dir'] = self.detect_unaligned_dir()
if self.params.unaligned_dir is None:
logging.warning("Unable to find subdirectory containing data")
except Exception, ex:
logging.error("Failed to load parameters: %s" % ex)
logging.error("Stopping")
sys.exit(1)
self.params['analysis_dir'] = self.analysis_dir
# Load metadata
try:
self.load_metadata(allow_save=allow_save_params)
except MissingParameterFileException, ex:
# No metadata file
logging.warning("Failed to load metadata: %s (ignored)" % ex)
logging.warning("Consider running metadata --update?")
except Exception, ex:
# Some other problem
logging.error("Failed to load metadata: %s" % ex)
logging.error("Stopping")
sys.exit(1)
def add_directory(self,sub_dir):
# Add a directory to the AutoProcess object
dirn = os.path.join(self.analysis_dir,sub_dir)
self.create_directory(dirn)
return dirn
def create_directory(self,dirn):
# Make the specified directory, and any leading directories
# that don't already exist
if not os.path.exists(dirn):
dir_path = os.sep
for sub_dir in dirn.split(os.sep):
dir_path = os.path.join(dir_path,sub_dir)
if not os.path.exists(dir_path):
print "Making %s" % dir_path
bcf_utils.mkdir(dir_path)
def load_parameters(self,allow_save=True):
"""
Load parameter values from file
Arguments:
allow_save (boolean): if True then allow params to be
saved back to the parameter file (the default);
otherwise don't allow save.
"""
# check for parameter file
if not self.has_parameter_file:
raise MissingParameterFileException(
"No parameter file %s" % self.parameter_file)
# Read contents of parameter file and assign values
logging.debug("Loading parameters from %s" %
self.parameter_file)
self.params.load(self.parameter_file,strict=False)
# File exists and can be read so set flag accordingly
self._save_params = allow_save
def save_parameters(self,alt_parameter_file=None,force=False):
"""
Save parameters to file
Arguments:
alt_parameter_file (str): optional, path to an
'alternative' parameter file; otherwise
parameters are saved to the default file for the
processing directory.
force (boolean): if True then force the parameters
to be saved even if saving was previously
turned off (default is False i.e. don't force
save).
"""
if self._save_params or force:
if alt_parameter_file is None:
self.params.save(self.parameter_file)
else:
self.params.save(alt_parameter_file)
def load_metadata(self,allow_save=True):
"""
Load metadata values from file
Arguments:
allow_save (boolean): if True then allow metadata
items to be saved back to the metadata file (the
default); otherwise don't allow save.
"""
# check for metadata file
if not os.path.exists(self.metadata_file):
raise MissingParameterFileException(
"No metadata file %s" % self.metadata_file)
# Read contents of metadata file and assign values
logging.debug("Loading metadata from %s" % self.metadata_file)
self.metadata.load(self.metadata_file)
# File exists and can be read so set flag accordingly
self._save_metadata = allow_save
def save_metadata(self,alt_metadata_file=None,force=False):
"""
Save metadata to file
Arguments:
alt_metadata_file (str): optional, path to an
'alternative' metadata file; otherwise
metadata are saved to the default file for the
processing directory.
force (boolean): if True then force the metadata
to be saved even if saving was previously
turned off (default is False i.e. don't force
save).
"""
if self._save_metadata or force:
if alt_metadata_file is None:
self.metadata.save(self.metadata_file)
else:
self.metadata.save(alt_metadata_file)
def update_metadata(self):
"""
Migrates and updates metadata values
"""
# Migrate missing values from parameter file
if self.has_parameter_file:
# Migrate relevant values across
print "Migrating metadata values from parameter file"
for param in ('platform','run_number','source','assay'):
if param not in self.params:
continue
if self.metadata[param] is None:
logging.debug("Importing metadata item '%s': set to "
"'%s'" % (param,self.params[param]))
print "Importing metadata item '%s'" % param
self.metadata[param] = self.params[param]
# Run name
if self.metadata.run_name is None:
print "Attempting to set missing 'run_name' metadata item"
self.metadata['run_name'] = self.run_name
# Instrument-related metadata
if self.metadata.instrument_name is None or \
self.metadata.instrument_datestamp is None or \
self.metadata.instrument_run_number is None:
print "Attempting to set missing instrument metadata items"
# Extract from run name
try:
datestamp,instrument,run_number,\
flow_cell_prefix,flow_cell_id = \
IlluminaData.split_run_name_full(self.run_name)
if self.metadata.instrument_name is None:
self.metadata['instrument_name'] = instrument
if self.metadata.instrument_datestamp is None:
self.metadata['instrument_datestamp'] = datestamp
if self.metadata.instrument_run_number is None:
self.metadata['instrument_run_number'] = run_number
if self.metadata.instrument_flow_cell_id is None:
self.metadata['instrument_flow_cell_id'] = \
flow_cell_prefix + flow_cell_id
except Exception as ex:
logging.warning("Unable to extract missing instrument metadata "
"from run name")
# Sequencing platform
if self.metadata.platform is None:
# Attempt to look up the instrument name
platform = bcl2fastq_utils.get_sequencer_platform(
self.analysis_dir,
instrument=self.metadata.instrument_name,
settings=self.settings)
if platform:
print "Setting 'platform' metadata item to %s" % \
platform
self.metadata['platform'] = platform
def edit_samplesheet(self):
"""
Bring up SampleSheet in an editor
"""
# Fetch the sample sheet
sample_sheet_file = self.params.sample_sheet
if sample_sheet_file is None:
logging.error("No sample sheet file to edit")
return
utils.edit_file(sample_sheet_file)
# Check updated sample sheet and issue warnings
if samplesheet_utils.check_and_warn(sample_sheet_file=sample_sheet_file):
logging.error("Sample sheet may have problems, see warnings above")
def init_readme(self):
"""
Create a new README file
"""
if self.readme_file is None:
readme_file = os.path.join(self.analysis_dir,'README')
print "Initialising %s" % readme_file
with open(readme_file,'w') as fp:
title = "Processing notes for %s" % \
os.path.basename(self.analysis_dir)
fp.write("%s\n%s\n" % (title,'='*len(title)))
else:
logging.warning("'%s' already exists" % self.readme_file)
def edit_readme(self):
"""
Bring up README in an editor
"""
if self.readme_file is None:
logging.error("No README file to edit")
return
utils.edit_file(self.readme_file,
append="\n[%s]" % time.ctime())
def load_illumina_data(self,unaligned_dir=None):
# Load and return an IlluminaData object
if unaligned_dir is None:
unaligned_dir = self.params.unaligned_dir
if unaligned_dir is None:
logging.error("Unaligned directory not specified, cannot load data")
return None
return IlluminaData.IlluminaData(self.analysis_dir,
unaligned_dir=unaligned_dir)
def load_project_metadata(self,project_metadata_file='projects.info',
check=True,update=False):
# Load data from 'projects.info' metadata file which lists
# and describes projects
# check: if True then check existing metadata for consistency with fastq files
# update: if True then update inconsistent metadata (i.e. add missing projects
# and remove ones that are inconsistent); implies 'check=True'
if project_metadata_file is not None:
filen = os.path.join(self.params.analysis_dir,project_metadata_file)
else:
filen = None
logging.debug("Project metadata file: %s" % filen)
try:
illumina_data = self.load_illumina_data()
except IlluminaData.IlluminaDataError,ex:
logging.warning("Failed to load data from bcl2fastq output "
"(ignored): %s" % ex)
illumina_data = None
projects_from_dirs = self.get_analysis_projects_from_dirs()
if filen is not None and os.path.exists(filen):
# Load existing file and check for consistency
logging.debug("Loading project metadata from existing file")
project_metadata = metadata.ProjectMetadataFile(filen)
else:
# First try to populate basic metadata from existing projects
logging.debug("Metadata file not found, guessing basic data")
project_metadata = metadata.ProjectMetadataFile()
projects = projects_from_dirs
if not projects:
# Get information from fastq files
logging.warning("No existing project directories detected")
logging.debug("Use fastq data from 'unaligned' directory")
if illumina_data is None:
# Can't even get fastq files
logging.warning("Failed to load fastq data from '%s'" %
self.params.unaligned_dir)
else:
projects = illumina_data.projects
# Populate the metadata file list of projects
logging.debug("Project\tSample\tFastq")
for project in projects:
project_name = project.name
sample_names = []
for sample in project.samples:
sample_name = sample.name
for fastq in sample.fastq:
logging.debug("%s\t%s\t%s" % (project_name,sample_name,fastq))
sample_names.append(sample_name)
project_metadata.add_project(project_name,sample_names)
# Turn off redundant checking/updating
check = False
update = False
# Perform consistency check or update
if check or update:
# Check that each project listed actually exists
bad_projects = []
for line in project_metadata:
pname = line['Project']
test_project = analysis.AnalysisProject(
pname,os.path.join(self.analysis_dir,pname))
if not test_project.is_analysis_dir:
# Project doesn't exist
logging.warning("Project '%s' listed in metadata file doesn't exist" \
% pname)
bad_projects.append(line)
# Remove bad projects
if update:
logging.debug("Removing non-existent projects")
for bad_project in bad_projects:
                    # NB: 'del' here only unbinds the loop variable; the
                    # entry is not actually removed from project_metadata
                    del(bad_project)
# Check that all actual projects are listed
for project in projects_from_dirs:
if len(project_metadata.lookup('Project',project.name)) == 0:
# Project not listed
if project.name != 'undetermined':
logging.warning("Project '%s' not listed in metadata file" %
project.name)
if update:
# Add line for unlisted project
logging.debug("Adding basic data for project '%s'" % project.name)
sample_names = []
for sample in project.samples:
sample_name = sample.name
for fastq in sample.fastq:
logging.debug("%s\t%s\t%s" % (project.name,sample_name,fastq))
sample_names.append(sample_name)
project_metadata.add_project(project.name,sample_names)
# Return the metadata object
return project_metadata
def update_project_metadata_file(self,unaligned_dir=None,
project_metadata_file='projects.info'):
"""
Update project metadata file from bcl2fastq outputs
Updates the contents of the project metadata file
(default: "projects.info") from a bcl-to-fastq output
directory, by adding new entries for projects in the
bcl-to-fastq outputs which don't currently appear.
Arguments:
unaligned_dir (str): path to the bcl-to-fastq
output directory relative to the analysis dir.
Defaults to the unaligned dir stored in the
analysis directory parameter file.
          project_metadata_file (str): optional, path to
the project metadata file to update
"""
if project_metadata_file is not None:
self.params['project_metadata'] = project_metadata_file
print "Project metadata file: %s" % self.params.project_metadata
filen = os.path.join(self.analysis_dir,
self.params.project_metadata)
if unaligned_dir is not None:
self.params['unaligned_dir'] = unaligned_dir
print "Unaligned_dir: %s" % self.params.unaligned_dir
illumina_data = IlluminaData.IlluminaData(
self.analysis_dir,
unaligned_dir=self.params.unaligned_dir)
if os.path.exists(filen):
# Load data from existing file
print "Loading project metadata from existing file: %s" % filen
project_metadata = metadata.ProjectMetadataFile(filen)
else:
# New (empty) metadata file
print "Creating new project metadata file: %s" % filen
project_metadata = metadata.ProjectMetadataFile()
# Populate/update
for project in illumina_data.projects:
project_name = project.name
sample_names = [s.name for s in project.samples]
if project_name not in project_metadata:
project_metadata.add_project(project_name,sample_names)
else:
project_metadata.update_project(project_name,
sample_names=sample_names)
# Save
project_metadata.save(filen)
def detect_unaligned_dir(self):
# Attempt to detect an existing 'bcl2fastq' or 'Unaligned' directory
# containing data from bcl2fastq
for test_unaligned in ('bcl2fastq','Unaligned'):
if os.path.isdir(os.path.join(self.analysis_dir,test_unaligned)):
logging.debug("Testing subdirectory '%s' to see if it has sequence data" %
test_unaligned)
try:
IlluminaData.IlluminaData(self.analysis_dir,
unaligned_dir=test_unaligned)
print "Setting 'unaligned_dir' parameter to %s" % test_unaligned
return test_unaligned
except IlluminaData.IlluminaDataError, ex:
logging.debug("Unable to load data from %s" % test_unaligned)
# Unable to detect existing data directory
return None
def set_log_dir(self,path):
"""
(Re)set the path for the log directory
If supplied ``path`` is relative then make a
subdirectory in the existing log directory
Arguments:
path (str): path for the log directory
Returns:
String: Full path for the new log directory.
"""
# If the new directory doesn't already exist then
# create it
if os.path.isabs(path):
self._log_dir = path
else:
self._log_dir = os.path.join(self.analysis_dir,
self._master_log_dir,
path)
return self.log_dir
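
    # Illustrative usage (hypothetical names): set_log_dir("002_make_fastqs")
    # creates logs/002_make_fastqs under the analysis directory, whereas an
    # absolute path such as "/scratch/logs" is used as-is.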
def log_path(self,*args):
# Return path appended to log directory
# Use for getting paths of files under the logs directory
return os.path.join(self.log_dir,*args)
def get_log_subdir(self,name):
"""
Return the name for a new log subdirectory
Subdirectories are named as NNN_<name> e.g.
001_setup, 002_make_fastqs etc
Arguments:
name (str): name for the subdirectory
(typically the name of the processing
stage that will produce logs to be
            written to the subdirectory)
Returns:
String: name for the new log subdirectory
(nb not the full path).
"""
return utils.get_numbered_subdir(
name,
parent_dir=os.path.join(self.analysis_dir,self._master_log_dir))
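
    # For example (hypothetical state): if 'logs' already contains
    # 001_setup, then get_log_subdir("make_fastqs") returns
    # "002_make_fastqs".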
def __del__(self):
"""
Implement __del__ method
        Performs clean up operations (e.g. save parameters,
remove temporary files etc) when the AutoProcess
object is destroyed.
"""
if self.analysis_dir is None:
return
try:
if not os.path.exists(self.analysis_dir):
logging.warning("Analysis dir '%s' not found" %
self.analysis_dir)
return
tmp_dir = os.path.join(self.analysis_dir,'tmp')
if os.path.isdir(tmp_dir):
logging.debug("Removing %s" % tmp_dir)
import shutil
shutil.rmtree(tmp_dir)
logging.debug("Saving parameters to file")
self.save_parameters()
logging.debug("Saving metadata to file")
self.save_metadata()
except Exception as ex:
logging.warning("Exception trying to delete "
"AutoProcess instance: %s" %
ex)
@property
def run_name(self):
# Return run name
if self.metadata.run_name is not None:
return self.metadata.run_name
elif self.params.data_dir is not None:
return os.path.basename(self.params.data_dir)
else:
run_name = os.path.basename(self.params.analysis_dir)
if run_name.endswith('_analysis'):
# Strip trailing _analysis
run_name = run_name[:-len('_analysis')]
return run_name
@property
def log_dir(self):
# Generate and return full path to log directory
return self.add_directory(self._log_dir)
@property
def tmp_dir(self):
# Generate and return full path to tmp directory
return self.add_directory('tmp')
@property
def script_code_dir(self):
# Generate and return full path to ScriptCode directory
script_code = self.add_directory('ScriptCode')
# Put a README file in ScriptCode to make sure it's
# not pruned on subsequent rsync operations
readme = os.path.join(script_code,'README.txt')
if not os.path.exists(readme):
open(readme,'w').write("The ScriptCode directory is a "
"place to put custom scripts and programs\n")
@property
def readme_file(self):
# If the analysis dir contains a README file then
# return the full path; otherwise return None
readme_file = None
for name in ('README','README.txt'):
readme_file = os.path.join(self.analysis_dir,name)
if os.path.isfile(readme_file):
return readme_file
# No match found
return None
@property
def run_reference_id(self):
"""
Return a run reference id (e.g. 'HISEQ_140701/242#22')
"""
return analysis.run_reference_id(
self.run_name,
platform=self.metadata.platform,
facility_run_number=self.metadata.run_number)
@property
def parameter_file(self):
"""
Return name of parameter file ('auto_process.info')
"""
return os.path.join(self.analysis_dir,'auto_process.info')
@property
def has_parameter_file(self):
"""
Indicate if there is a parameter file (typically auto_process.info)
"""
        return os.path.exists(self.parameter_file)
@property
def metadata_file(self):
"""
Return name of metadata file ('metadata.info')
"""
return os.path.join(self.analysis_dir,'metadata.info')
@property
def paired_end(self):
"""
Check if run is paired end
The endedness of the run is checked as follows:
- If there are analysis project directories then the
ended-ness is determined by checking the contents of
these directories
- If there are no project directories then the
ended-ness is determined from the contents of the
'unaligned' directory
Returns:
Boolean: True if run is paired end, False if single end,
None if endedness cannot be determined
"""
projects = self.get_analysis_projects_from_dirs()
if projects:
return reduce(lambda x,y: x and y.info.paired_end,
projects,True)
else:
try:
return self.load_illumina_data().paired_end
except IlluminaData.IlluminaDataError:
return None
def print_values(self,data):
"""
Print key/value pairs from a dictionary
"""
values = bcf_utils.OrderedDictionary()
values['Run reference'] = self.run_reference_id
for i in data:
values[i] = data[i]
field_width = max([len(i) for i in values])
for item in values:
print "%s: %s" % (item+' '*(field_width-len(item)),
values[item])
def set_param(self,key,value):
"""
Set an analysis directory parameter
Arguments:
key (str): parameter name
value (object): value to assign to the parameter
"""
if key in self.params:
print "Setting parameter '%s' to '%s'" % (key,value)
self.params[key] = value
else:
raise KeyError("Parameter 'key' not found" % key)
def print_params(self):
"""
Print the current parameter settings
"""
if self.has_parameter_file:
print "Parameters in %s:" % (os.path.basename(self.parameter_file))
else:
print "No parameters file found"
self.print_values(self.params)
def set_metadata(self,key,value):
"""
Set an analysis directory metadata item
Arguments:
key (str): parameter name
value (object): value to assign to the parameter
"""
if key in self.metadata:
print "Setting metadata item '%s' to '%s'" % (key,value)
self.metadata[key] = value
else:
raise KeyError("Metadata item 'key' not found" % key)
def print_metadata(self):
"""
Print the metadata items and associated values
"""
if os.path.exists(self.metadata_file):
print "Metadata in %s:" % (os.path.basename(self.metadata_file))
else:
print "No metadata file found"
self.print_values(self.metadata)
def make_project_metadata_file(self,project_metadata_file='projects.info'):
# Generate a project metadata file based on the fastq
# files and directory structure
project_metadata = self.load_project_metadata(
project_metadata_file=project_metadata_file,
update=True)
# Save to file
filen = os.path.join(self.params.analysis_dir,project_metadata_file)
project_metadata.save(filen)
self.params['project_metadata'] = project_metadata_file
print "Saving project metadata to %s" % self.params.project_metadata
def get_analysis_projects(self,pattern=None):
"""
Return the analysis projects in a list
By default returns all projects within the analysis
directory (including 'undetermined') which are listed
in the 'projects.info' metadata file.
If the 'pattern' is not None then it should be a simple
pattern used to match against available names to select
a subset of projects (see bcf_utils.name_matches).
If any project in 'projects.info' doesn't have a
matching analysis directory then an exception is
raised.
Note:
- If there is no 'projects.info' file then the projects
are taken from those in the 'unaligned' directory of
the analysis directory.
- If there is no 'unaligned' directory then the projects
are determined from the subdirectories in the analysis
directory.
Arguments:
pattern (str): optional pattern to select a subset
of projects (default: select all projects)
Returns:
List: list of AnalysisProject instances.
"""
project_metadata = self.load_project_metadata(
self.params.project_metadata)
projects = []
if pattern is None:
pattern = '*'
for line in project_metadata:
name = line['Project']
if not bcf_utils.name_matches(name,pattern):
# Name failed to match, ignore
continue
logging.debug("Acquiring data for project %s" % name)
# Look for a matching project directory
project_dir = None
dirs = bcf_utils.list_dirs(self.analysis_dir,startswith=name)
logging.debug("Possible matching directories: %s" % dirs)
if len(dirs) == 1:
# Just a single match
project_dir = dirs[0]
else:
# Multiple matches, look for an exact match
for d in dirs:
if d == name:
project_dir = name
break
if project_dir is None:
logging.error("Unable to resolve directory for project "
"'%s'" % name)
logging.error("Possible dirs: %s" % dirs)
raise Exception("Unable to resolve directory for project "
"'%s'" % name)
# Attempt to load the project data
project_dir = os.path.join(self.analysis_dir,project_dir)
projects.append(analysis.AnalysisProject(name,project_dir))
# Add undetermined reads directory
if bcf_utils.name_matches('undetermined',pattern):
undetermined_analysis = self.undetermined()
if undetermined_analysis is not None and \
'undetermined' not in [p.name for p in projects]:
projects.append(undetermined_analysis)
return projects
def get_analysis_projects_from_dirs(self,pattern=None):
"""
Return a list of AnalysisProjects in the analysis directory
Tests each of the subdirectories in the top-level of the
analysis directory and rejects any that appear to be
        CASAVA/bcl2fastq outputs or which don't successfully load
as AnalysisProject instances.
Unlike the `get_analysis_projects` method, no checking
against the project metadata (typically in 'projects.info')
is performed.
If the 'pattern' is not None then it should be a simple
pattern used to match against available names to select
a subset of projects (see bcf_utils.name_matches).
Arguments:
pattern (str): optional pattern to select a subset
of projects (default: select all projects)
Returns:
List: list of AnalysisProject instances.
"""
logging.debug("Testing subdirectories to determine analysis projects")
projects = []
if pattern is None:
pattern = '*'
# Try loading each subdirectory as a project
for dirn in bcf_utils.list_dirs(self.analysis_dir):
# Test for bcl2fastq output
try:
IlluminaData.IlluminaData(self.analysis_dir,
unaligned_dir=dirn)
logging.debug("* %s: rejected" % dirn)
continue
except IlluminaData.IlluminaDataError:
pass
except Exception as ex:
logging.debug("Exception when attempting to load "
"subdir '%s' as CASAVA/bcl2fastq output "
"(ignored): %s" % (dirn,ex))
# Try loading as a project
test_project = analysis.AnalysisProject(
dirn,os.path.join(self.analysis_dir,dirn))
if test_project.is_analysis_dir:
logging.debug("* %s: analysis directory" % dirn)
if bcf_utils.name_matches(test_project.name,
pattern):
projects.append(test_project)
else:
logging.debug("* %s: rejected" % dirn)
return projects
def undetermined(self):
# Return analysis project directory for undetermined indices
# or None if not found
dirs = bcf_utils.list_dirs(self.analysis_dir,matches='undetermined')
if len(dirs) == 0:
logging.debug("No undetermined analysis directory found")
return None
elif len(dirs) > 1:
raise Exception, "Found multiple undetermined analysis directories: %s" \
% ' '.join(dirs)
# Attempt to load the analysis project data
undetermined_dir = os.path.join(self.analysis_dir,dirs[0])
return analysis.AnalysisProject(dirs[0],undetermined_dir)
def log_analysis(self):
# Add a record of the analysis to the logging file
raise NotImplementedError
def check_metadata(self,items):
"""
Check that metadata items are set
For metadata items supplied as an iterable in 'items',
check that each is set to a non-null value. Report
those that are null.
Return False if one or more are null; otherwise return
True.
"""
# Check metadata
metadata_ok = True
for item in items:
if item in self.metadata.null_items():
metadata_ok = False
logging.warning("Metadata item '%s' is not set" % item)
return metadata_ok
| [
"[email protected]"
] | |
ea87a929be67a2abcf9fe9a15362b1768dcc7a70 | 9f7d4d76c7e66aa424a5f8723575dc489f1fd2ab | /2021/4/4.py | 473c4f3604c356f4364a7d7adc7f597ac43c4b9e | [
"MIT"
] | permissive | kristianwiklund/AOC | df5a873287304816f25d91259c6e6c99c7a5f4bf | d9a668c406d2fd1b805d9b6a34cffa237a33c119 | refs/heads/master | 2023-01-12T09:01:11.012081 | 2023-01-02T19:12:29 | 2023-01-02T19:12:29 | 227,458,380 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 598 | py | #!/usr/bin/python3
import sys
from bb import BB
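# 'bb' is the author's local bingo-board helper (not a published
# package); from the usage below it is assumed that BB(stream) reads one
# board, draw(n) marks n and returns truthy once the board has won, and
# score() returns the board's score (sum of unmarked numbers).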
# ------
randoms = next(sys.stdin).strip().split(",")
print(randoms)
boards = list()
try:
while True:
next(sys.stdin)
a = BB(sys.stdin)
boards.append(a)
# print(a)
except StopIteration:
    # reading past the final board signals end of input
    pass
for i in randoms:
print("Drawing ",i)
s = [x.draw(int(i)) for x in boards]
if sum(s):
break
for t in range(len(s)):
if s[t]:
s = boards[t].score()
break
print("Board ",t," is the board")
print("Score: ",int(i)*s)
| [
"[email protected]"
] | |
5ae4f7b8fa1951c2a6ad2cceeb98570947a922c2 | 849cd35166a93259c8bf84f001a3c40d9fe18b98 | /Homeworks/test.py | 4e8c67718adee043fe5c125fb2ffc073b240d0c2 | [] | no_license | Larionov0/Group2-lessons | 98c3d20d7532583ee66e766371235cfe888264c5 | 6426962e9b6766a9470ab1408b95486e63e4c2fa | refs/heads/master | 2023-05-07T01:42:57.290428 | 2021-05-27T17:52:02 | 2021-05-27T17:52:02 | 334,012,422 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,723 | py | trees = [
["Дуб", 5 , "д", "Д", 1,
[],
0
],
["Береза", 3, "б", "Б", 3,
[],
0
],
["Сосна", 1, "с", "С", 4,
[],
0
]
]
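# Each record above appears to hold (layout inferred from the code
# below, not documented in the original): name, moves needed to mature,
# sapling symbol, mature symbol, remaining stock, a list of planted
# trees (each [symbol, moves_made, x, y]) and a planted-trees counter.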
width = 6
height = 6
# matrix
matrix = []
i = 0
while i < height:
row = ['-'] * width
matrix.append(row)
i += 1
for row in matrix:
row_text = '|'
for element in row:
row_text += str(element) + ' '
row_text = row_text[:-1] + '|'
print(row_text)
# gameplay
while True:
    # player's move; the prompt strings 'так'/'ні' mean yes/no
skip = input('Skip? так/ні: ')
if skip == 'ні':
x = int(input('x: '))
y = int(input('y: '))
tree = input('tree: ')
for el in trees:
if el[0] == tree and el[4] != 0:
treeInfo = []
treeChar = el[2]
treeMadeMoves = 0
treeX = x
treeY = y
treeInfo.append(treeChar)
treeInfo.append(treeMadeMoves)
treeInfo.append(treeX)
treeInfo.append(treeY)
el[5].append(treeInfo)
el[6] += 1
el[4] -= 1
    # tree growth
for el in trees:
for tree in el[5]:
if tree[1] == el[1]:
tree[0] = el[3]
tree[1] += 1
print(trees)
for el in trees:
for tree in el[5]:
matrix[tree[3]-1][tree[2]-1] = tree[0]
    # render the matrix
for row in matrix:
row_text = '|'
for element in row:
row_text += str(element) + ' '
row_text = row_text[:-1] + '|'
print(row_text)
| [
"[email protected]"
] | |
95eeaa6375a8e6693831fb82f1aa36d7b02af8d6 | d7016f69993570a1c55974582cda899ff70907ec | /sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2022_05_01/operations/_storage_accounts_operations.py | fca5324144c977450c513adec4de92ec91e8dd79 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 123,655 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_check_name_availability_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/checkNameAvailability"
)
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_create_request(
resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_delete_request(
resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
def build_get_properties_request(
resource_group_name: str,
account_name: str,
subscription_id: str,
*,
expand: Optional[Union[str, _models.StorageAccountExpand]] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if expand is not None:
_params["$expand"] = _SERIALIZER.query("expand", expand, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_update_request(
resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/storageAccounts")
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_by_resource_group_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_keys_request(
resource_group_name: str,
account_name: str,
subscription_id: str,
*,
expand: Literal["kerb"] = "kerb",
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/listKeys",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if expand is not None:
_params["$expand"] = _SERIALIZER.query("expand", expand, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_regenerate_key_request(
resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/regenerateKey",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_account_sas_request(
resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListAccountSas",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_service_sas_request(
resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListServiceSas",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_failover_request(
resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/failover",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="POST", url=_url, params=_params, **kwargs)
def build_hierarchical_namespace_migration_request(
resource_group_name: str, account_name: str, subscription_id: str, *, request_type: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/hnsonmigration",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
_params["requestType"] = _SERIALIZER.query("request_type", request_type, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_abort_hierarchical_namespace_migration_request(
resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/aborthnsonmigration",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_restore_blob_ranges_request(
resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/restoreBlobRanges",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_revoke_user_delegation_keys_request(
resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/revokeUserDelegationKeys",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
),
"accountName": _SERIALIZER.url("account_name", account_name, "str", max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="POST", url=_url, params=_params, **kwargs)
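# A minimal sketch (an assumption, not part of the generated code) of what the
# build_*_request helpers above produce: each assembles an
# azure.core.rest.HttpRequest by expanding the ARM path template and attaching
# the api-version query parameter; the base endpoint and authentication are
# applied later by the client pipeline. The argument values below are
# placeholders.
#
#     request = build_failover_request(
#         resource_group_name="my-rg",
#         account_name="mystorageacct123",
#         subscription_id="00000000-0000-0000-0000-000000000000",
#     )
#     assert request.method == "POST"
#     # request.url is still relative at this point, e.g.
#     # "/subscriptions/.../storageAccounts/mystorageacct123/failover?api-version=2022-05-01"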
class StorageAccountsOperations: # pylint: disable=too-many-public-methods
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.storage.v2022_05_01.StorageManagementClient`'s
:attr:`storage_accounts` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@overload
def check_name_availability(
self,
account_name: _models.StorageAccountCheckNameAvailabilityParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.CheckNameAvailabilityResult:
"""Checks that the storage account name is valid and is not already in use.
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name:
~azure.mgmt.storage.v2022_05_01.models.StorageAccountCheckNameAvailabilityParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CheckNameAvailabilityResult or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.CheckNameAvailabilityResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def check_name_availability(
self, account_name: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.CheckNameAvailabilityResult:
"""Checks that the storage account name is valid and is not already in use.
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CheckNameAvailabilityResult or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.CheckNameAvailabilityResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def check_name_availability(
self, account_name: Union[_models.StorageAccountCheckNameAvailabilityParameters, IO], **kwargs: Any
) -> _models.CheckNameAvailabilityResult:
"""Checks that the storage account name is valid and is not already in use.
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Is either a model type or an IO type. Required.
:type account_name:
~azure.mgmt.storage.v2022_05_01.models.StorageAccountCheckNameAvailabilityParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CheckNameAvailabilityResult or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.CheckNameAvailabilityResult
:raises ~azure.core.exceptions.HttpResponseError:
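Example (a minimal sketch, not part of the generated client; the credential and
subscription ID are placeholder assumptions):

.. code-block:: python

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storage.v2022_05_01 import StorageManagementClient
    from azure.mgmt.storage.v2022_05_01.models import (
        StorageAccountCheckNameAvailabilityParameters,
    )

    client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    result = client.storage_accounts.check_name_availability(
        StorageAccountCheckNameAvailabilityParameters(name="mystorageacct123")
    )
    print(result.name_available, result.reason, result.message)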
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.CheckNameAvailabilityResult]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(account_name, (IO, bytes)):
_content = account_name
else:
_json = self._serialize.body(account_name, "StorageAccountCheckNameAvailabilityParameters")
request = build_check_name_availability_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.check_name_availability.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("CheckNameAvailabilityResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_name_availability.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/checkNameAvailability"} # type: ignore
def _create_initial(
self,
resource_group_name: str,
account_name: str,
parameters: Union[_models.StorageAccountCreateParameters, IO],
**kwargs: Any
) -> Optional[_models.StorageAccount]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.StorageAccount]]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "StorageAccountCreateParameters")
request = build_create_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("StorageAccount", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}"} # type: ignore
@overload
def begin_create(
self,
resource_group_name: str,
account_name: str,
parameters: _models.StorageAccountCreateParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.StorageAccount]:
"""Asynchronously creates a new storage account with the specified parameters. If an account is
already created and a subsequent create request is issued with different properties, the
account properties will be updated. If an account is already created and a subsequent create or
update request is issued with the exact same set of properties, the request will succeed.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide for the created account. Required.
:type parameters: ~azure.mgmt.storage.v2022_05_01.models.StorageAccountCreateParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either StorageAccount or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.v2022_05_01.models.StorageAccount]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_create(
self,
resource_group_name: str,
account_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.StorageAccount]:
"""Asynchronously creates a new storage account with the specified parameters. If an account is
already created and a subsequent create request is issued with different properties, the
account properties will be updated. If an account is already created and a subsequent create or
update request is issued with the exact same set of properties, the request will succeed.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide for the created account. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either StorageAccount or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.v2022_05_01.models.StorageAccount]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_create(
self,
resource_group_name: str,
account_name: str,
parameters: Union[_models.StorageAccountCreateParameters, IO],
**kwargs: Any
) -> LROPoller[_models.StorageAccount]:
"""Asynchronously creates a new storage account with the specified parameters. If an account is
already created and a subsequent create request is issued with different properties, the
account properties will be updated. If an account is already created and a subsequent create or
update request is issued with the exact same set of properties, the request will succeed.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide for the created account. Is either a model type or
an IO type. Required.
:type parameters: ~azure.mgmt.storage.v2022_05_01.models.StorageAccountCreateParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either StorageAccount or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.v2022_05_01.models.StorageAccount]
:raises ~azure.core.exceptions.HttpResponseError:
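Example (a minimal sketch, not part of the generated client; the resource names
and subscription ID are placeholder assumptions, and a plain dict is serialized
as StorageAccountCreateParameters):

.. code-block:: python

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storage.v2022_05_01 import StorageManagementClient

    client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    poller = client.storage_accounts.begin_create(
        "my-rg",
        "mystorageacct123",
        {
            "location": "eastus",
            "sku": {"name": "Standard_LRS"},
            "kind": "StorageV2",
        },
    )
    account = poller.result()  # blocks until the long-running operation completes
    print(account.provisioning_state)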
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageAccount]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_initial( # type: ignore
resource_group_name=resource_group_name,
account_name=account_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("StorageAccount", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}"} # type: ignore
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, account_name: str, **kwargs: Any
) -> None:
"""Deletes a storage account in Microsoft Azure.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
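Example (a minimal sketch; the resource names are placeholder assumptions):

.. code-block:: python

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storage.v2022_05_01 import StorageManagementClient

    client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    client.storage_accounts.delete("my-rg", "mystorageacct123")  # returns None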
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_delete_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}"} # type: ignore
@distributed_trace
def get_properties(
self,
resource_group_name: str,
account_name: str,
expand: Optional[Union[str, _models.StorageAccountExpand]] = None,
**kwargs: Any
) -> _models.StorageAccount:
"""Returns the properties for the specified storage account including but not limited to name, SKU
name, location, and account status. The ListKeys operation should be used to retrieve storage
keys.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param expand: May be used to expand the properties within the account's properties. By default,
data is not included when fetching properties. Currently we only support geoReplicationStats
and blobRestoreStatus. Known values are: "geoReplicationStats" and "blobRestoreStatus". Default
value is None.
:type expand: str or ~azure.mgmt.storage.v2022_05_01.models.StorageAccountExpand
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccount or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.StorageAccount
:raises ~azure.core.exceptions.HttpResponseError:
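Example (a minimal sketch; the resource names are placeholder assumptions):

.. code-block:: python

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storage.v2022_05_01 import StorageManagementClient

    client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    account = client.storage_accounts.get_properties(
        "my-rg", "mystorageacct123", expand="geoReplicationStats"
    )
    print(account.sku.name, account.location, account.geo_replication_stats)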
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageAccount]
request = build_get_properties_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
expand=expand,
api_version=api_version,
template_url=self.get_properties.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("StorageAccount", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_properties.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}"} # type: ignore
@overload
def update(
self,
resource_group_name: str,
account_name: str,
parameters: _models.StorageAccountUpdateParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.StorageAccount:
"""The update operation can be used to update the SKU, encryption, access tier, or tags for a
storage account. It can also be used to map the account to a custom domain. Only one custom
domain is supported per storage account; the replacement/change of custom domain is not
supported. In order to replace an old custom domain, the old value must be cleared/unregistered
before a new value can be set. The update of multiple properties is supported. This call does
not change the storage keys for the account. If you want to change the storage account keys,
use the regenerate keys operation. The location and name of the storage account cannot be
changed after creation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide for the updated account. Required.
:type parameters: ~azure.mgmt.storage.v2022_05_01.models.StorageAccountUpdateParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccount or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.StorageAccount
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def update(
self,
resource_group_name: str,
account_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.StorageAccount:
"""The update operation can be used to update the SKU, encryption, access tier, or tags for a
storage account. It can also be used to map the account to a custom domain. Only one custom
domain is supported per storage account; the replacement/change of custom domain is not
supported. In order to replace an old custom domain, the old value must be cleared/unregistered
before a new value can be set. The update of multiple properties is supported. This call does
not change the storage keys for the account. If you want to change the storage account keys,
use the regenerate keys operation. The location and name of the storage account cannot be
changed after creation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide for the updated account. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccount or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.StorageAccount
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def update(
self,
resource_group_name: str,
account_name: str,
parameters: Union[_models.StorageAccountUpdateParameters, IO],
**kwargs: Any
) -> _models.StorageAccount:
"""The update operation can be used to update the SKU, encryption, access tier, or tags for a
storage account. It can also be used to map the account to a custom domain. Only one custom
domain is supported per storage account; the replacement/change of custom domain is not
supported. In order to replace an old custom domain, the old value must be cleared/unregistered
before a new value can be set. The update of multiple properties is supported. This call does
not change the storage keys for the account. If you want to change the storage account keys,
use the regenerate keys operation. The location and name of the storage account cannot be
changed after creation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide for the updated account. Is either a model type or
an IO type. Required.
:type parameters: ~azure.mgmt.storage.v2022_05_01.models.StorageAccountUpdateParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccount or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.StorageAccount
:raises ~azure.core.exceptions.HttpResponseError:
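Example (a minimal sketch; the resource names are placeholder assumptions, and a
plain dict is serialized as StorageAccountUpdateParameters):

.. code-block:: python

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storage.v2022_05_01 import StorageManagementClient

    client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    account = client.storage_accounts.update(
        "my-rg",
        "mystorageacct123",
        {"tags": {"environment": "test"}},
    )
    print(account.tags)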
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageAccount]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "StorageAccountUpdateParameters")
request = build_update_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("StorageAccount", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}"} # type: ignore
@distributed_trace
def list(self, **kwargs: Any) -> Iterable["_models.StorageAccount"]:
"""Lists all the storage accounts available under the subscription. Note that storage keys are not
returned; use the ListKeys operation for this.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StorageAccount or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2022_05_01.models.StorageAccount]
:raises ~azure.core.exceptions.HttpResponseError:
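Example (a minimal sketch; the subscription ID is a placeholder assumption):

.. code-block:: python

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storage.v2022_05_01 import StorageManagementClient

    client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    # ItemPaged follows the service's nextLink transparently.
    for account in client.storage_accounts.list():
        print(account.name, account.location)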
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageAccountListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("StorageAccountListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Storage/storageAccounts"} # type: ignore
@distributed_trace
def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.StorageAccount"]:
"""Lists all the storage accounts available under the given resource group. Note that storage keys
are not returned; use the ListKeys operation for this.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StorageAccount or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2022_05_01.models.StorageAccount]
:raises ~azure.core.exceptions.HttpResponseError:
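Example (a minimal sketch; the resource group name is a placeholder assumption):

.. code-block:: python

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storage.v2022_05_01 import StorageManagementClient

    client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    for account in client.storage_accounts.list_by_resource_group("my-rg"):
        print(account.name)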
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageAccountListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("StorageAccountListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts"} # type: ignore
@distributed_trace
def list_keys(
self, resource_group_name: str, account_name: str, expand: Literal["kerb"] = "kerb", **kwargs: Any
) -> _models.StorageAccountListKeysResult:
"""Lists the access keys or Kerberos keys (if active directory enabled) for the specified storage
account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param expand: Specifies the type of the key to be listed. The only possible value is "kerb".
Known values are "kerb" and None. Default value is "kerb".
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccountListKeysResult or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.StorageAccountListKeysResult
:raises ~azure.core.exceptions.HttpResponseError:
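Example (a minimal sketch; the resource names are placeholder assumptions):

.. code-block:: python

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storage.v2022_05_01 import StorageManagementClient

    client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    keys = client.storage_accounts.list_keys("my-rg", "mystorageacct123")
    for key in keys.keys:
        print(key.key_name, key.permissions)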
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageAccountListKeysResult]
request = build_list_keys_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
expand=expand,
api_version=api_version,
template_url=self.list_keys.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("StorageAccountListKeysResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/listKeys"} # type: ignore
@overload
def regenerate_key(
self,
resource_group_name: str,
account_name: str,
regenerate_key: _models.StorageAccountRegenerateKeyParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.StorageAccountListKeysResult:
"""Regenerates one of the access keys or Kerberos keys for the specified storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param regenerate_key: Specifies the name of the key which should be regenerated -- key1, key2,
kerb1, kerb2. Required.
:type regenerate_key:
~azure.mgmt.storage.v2022_05_01.models.StorageAccountRegenerateKeyParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccountListKeysResult or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.StorageAccountListKeysResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def regenerate_key(
self,
resource_group_name: str,
account_name: str,
regenerate_key: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.StorageAccountListKeysResult:
"""Regenerates one of the access keys or Kerberos keys for the specified storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param regenerate_key: Specifies the name of the key which should be regenerated -- key1, key2,
kerb1, kerb2. Required.
:type regenerate_key: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccountListKeysResult or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.StorageAccountListKeysResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def regenerate_key(
self,
resource_group_name: str,
account_name: str,
regenerate_key: Union[_models.StorageAccountRegenerateKeyParameters, IO],
**kwargs: Any
) -> _models.StorageAccountListKeysResult:
"""Regenerates one of the access keys or Kerberos keys for the specified storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param regenerate_key: Specifies the name of the key which should be regenerated -- key1, key2,
kerb1, kerb2. Is either a model type or an IO type. Required.
:type regenerate_key:
~azure.mgmt.storage.v2022_05_01.models.StorageAccountRegenerateKeyParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccountListKeysResult or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.StorageAccountListKeysResult
:raises ~azure.core.exceptions.HttpResponseError:
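Example (a minimal sketch; the resource names are placeholder assumptions, and a
plain dict is serialized as StorageAccountRegenerateKeyParameters):

.. code-block:: python

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storage.v2022_05_01 import StorageManagementClient

    client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    result = client.storage_accounts.regenerate_key(
        "my-rg", "mystorageacct123", {"key_name": "key1"}
    )
    print([key.key_name for key in result.keys])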
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.StorageAccountListKeysResult]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(regenerate_key, (IO, bytes)):
_content = regenerate_key
else:
_json = self._serialize.body(regenerate_key, "StorageAccountRegenerateKeyParameters")
request = build_regenerate_key_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.regenerate_key.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("StorageAccountListKeysResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
regenerate_key.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/regenerateKey"} # type: ignore
@overload
def list_account_sas(
self,
resource_group_name: str,
account_name: str,
parameters: _models.AccountSasParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ListAccountSasResponse:
"""List SAS credentials of a storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide to list SAS credentials for the storage account.
Required.
:type parameters: ~azure.mgmt.storage.v2022_05_01.models.AccountSasParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ListAccountSasResponse or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.ListAccountSasResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def list_account_sas(
self,
resource_group_name: str,
account_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ListAccountSasResponse:
"""List SAS credentials of a storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide to list SAS credentials for the storage account.
Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ListAccountSasResponse or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.ListAccountSasResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def list_account_sas(
self,
resource_group_name: str,
account_name: str,
parameters: Union[_models.AccountSasParameters, IO],
**kwargs: Any
) -> _models.ListAccountSasResponse:
"""List SAS credentials of a storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide to list SAS credentials for the storage account.
Is either a model type or an IO type. Required.
:type parameters: ~azure.mgmt.storage.v2022_05_01.models.AccountSasParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ListAccountSasResponse or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.ListAccountSasResponse
:raises ~azure.core.exceptions.HttpResponseError:
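Example (a minimal sketch; the resource names and SAS settings are placeholder
assumptions, and a plain dict is serialized as AccountSasParameters):

.. code-block:: python

    from datetime import datetime, timedelta, timezone

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storage.v2022_05_01 import StorageManagementClient

    client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    sas = client.storage_accounts.list_account_sas(
        "my-rg",
        "mystorageacct123",
        {
            "services": "b",  # blob service
            "resource_types": "o",  # object-level resources
            "permissions": "r",  # read-only
            "shared_access_expiry_time": datetime.now(timezone.utc) + timedelta(hours=1),
        },
    )
    print(sas.account_sas_token)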
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ListAccountSasResponse]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "AccountSasParameters")
request = build_list_account_sas_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.list_account_sas.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("ListAccountSasResponse", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_account_sas.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListAccountSas"} # type: ignore
@overload
def list_service_sas(
self,
resource_group_name: str,
account_name: str,
parameters: _models.ServiceSasParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ListServiceSasResponse:
"""List service SAS credentials of a specific resource.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide to list service SAS credentials. Required.
:type parameters: ~azure.mgmt.storage.v2022_05_01.models.ServiceSasParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ListServiceSasResponse or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.ListServiceSasResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def list_service_sas(
self,
resource_group_name: str,
account_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ListServiceSasResponse:
"""List service SAS credentials of a specific resource.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide to list service SAS credentials. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ListServiceSasResponse or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.ListServiceSasResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def list_service_sas(
self,
resource_group_name: str,
account_name: str,
parameters: Union[_models.ServiceSasParameters, IO],
**kwargs: Any
) -> _models.ListServiceSasResponse:
"""List service SAS credentials of a specific resource.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide to list service SAS credentials. Is either a model
type or an IO type. Required.
:type parameters: ~azure.mgmt.storage.v2022_05_01.models.ServiceSasParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ListServiceSasResponse or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2022_05_01.models.ListServiceSasResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ListServiceSasResponse]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ServiceSasParameters")
request = build_list_service_sas_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.list_service_sas.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("ListServiceSasResponse", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_service_sas.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListServiceSas"} # type: ignore
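    # Hedged usage sketch (illustrative; assumes `client` is a configured
    # StorageManagementClient and `sas_params` is a ServiceSasParameters
    # model for the target resource -- these names are not part of this module):
    #
    #   response = client.storage_accounts.list_service_sas(
    #       "my-rg", "mystorageacct", sas_params)
    #   token = response.service_sas_token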
def _failover_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, account_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_failover_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._failover_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_failover_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/failover"} # type: ignore
@distributed_trace
def begin_failover(self, resource_group_name: str, account_name: str, **kwargs: Any) -> LROPoller[None]:
"""Failover request can be triggered for a storage account in case of availability issues. The
failover occurs from the storage account's primary cluster to secondary cluster for RA-GRS
accounts. The secondary cluster will become primary after failover.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._failover_initial( # type: ignore
resource_group_name=resource_group_name,
account_name=account_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(
PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_failover.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/failover"} # type: ignore
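    # Hedged usage sketch (illustrative; assumes a configured
    # StorageManagementClient as `client`):
    #
    #   poller = client.storage_accounts.begin_failover("my-rg", "mystorageacct")
    #   poller.result()   # block until the long-running failover completes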
def _hierarchical_namespace_migration_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, account_name: str, request_type: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_hierarchical_namespace_migration_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
request_type=request_type,
api_version=api_version,
template_url=self._hierarchical_namespace_migration_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_hierarchical_namespace_migration_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/hnsonmigration"} # type: ignore
@distributed_trace
def begin_hierarchical_namespace_migration(
self, resource_group_name: str, account_name: str, request_type: str, **kwargs: Any
) -> LROPoller[None]:
"""Live Migration of storage account to enable Hns.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param request_type: Required. Hierarchical namespace migration type can either be a
hierarchical namespace validation request 'HnsOnValidationRequest' or a hydration request
'HnsOnHydrationRequest'. The validation request will validate the migration whereas the
hydration request will migrate the account. Required.
:type request_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._hierarchical_namespace_migration_initial( # type: ignore
resource_group_name=resource_group_name,
account_name=account_name,
request_type=request_type,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(
PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_hierarchical_namespace_migration.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/hnsonmigration"} # type: ignore
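    # Hedged usage sketch (illustrative): validate the migration first, then
    # hydrate, using the request_type values named in the docstring above:
    #
    #   client.storage_accounts.begin_hierarchical_namespace_migration(
    #       "my-rg", "mystorageacct", "HnsOnValidationRequest").result()
    #   client.storage_accounts.begin_hierarchical_namespace_migration(
    #       "my-rg", "mystorageacct", "HnsOnHydrationRequest").result()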
def _abort_hierarchical_namespace_migration_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, account_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_abort_hierarchical_namespace_migration_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._abort_hierarchical_namespace_migration_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_abort_hierarchical_namespace_migration_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/aborthnsonmigration"} # type: ignore
@distributed_trace
def begin_abort_hierarchical_namespace_migration(
self, resource_group_name: str, account_name: str, **kwargs: Any
) -> LROPoller[None]:
"""Abort live Migration of storage account to enable Hns.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._abort_hierarchical_namespace_migration_initial( # type: ignore
resource_group_name=resource_group_name,
account_name=account_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(
PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_abort_hierarchical_namespace_migration.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/aborthnsonmigration"} # type: ignore
def _restore_blob_ranges_initial(
self,
resource_group_name: str,
account_name: str,
parameters: Union[_models.BlobRestoreParameters, IO],
**kwargs: Any
) -> _models.BlobRestoreStatus:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.BlobRestoreStatus]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "BlobRestoreParameters")
request = build_restore_blob_ranges_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._restore_blob_ranges_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("BlobRestoreStatus", pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize("BlobRestoreStatus", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_restore_blob_ranges_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/restoreBlobRanges"} # type: ignore
@overload
def begin_restore_blob_ranges(
self,
resource_group_name: str,
account_name: str,
parameters: _models.BlobRestoreParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.BlobRestoreStatus]:
"""Restore blobs in the specified blob ranges.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide for restore blob ranges. Required.
:type parameters: ~azure.mgmt.storage.v2022_05_01.models.BlobRestoreParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either BlobRestoreStatus or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.v2022_05_01.models.BlobRestoreStatus]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_restore_blob_ranges(
self,
resource_group_name: str,
account_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.BlobRestoreStatus]:
"""Restore blobs in the specified blob ranges.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide for restore blob ranges. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either BlobRestoreStatus or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.v2022_05_01.models.BlobRestoreStatus]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_restore_blob_ranges(
self,
resource_group_name: str,
account_name: str,
parameters: Union[_models.BlobRestoreParameters, IO],
**kwargs: Any
) -> LROPoller[_models.BlobRestoreStatus]:
"""Restore blobs in the specified blob ranges.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:param parameters: The parameters to provide for restore blob ranges. Is either a model type or
        an IO type. Required.
:type parameters: ~azure.mgmt.storage.v2022_05_01.models.BlobRestoreParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either BlobRestoreStatus or the result of
cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.storage.v2022_05_01.models.BlobRestoreStatus]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.BlobRestoreStatus]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._restore_blob_ranges_initial( # type: ignore
resource_group_name=resource_group_name,
account_name=account_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("BlobRestoreStatus", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(
PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_restore_blob_ranges.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/restoreBlobRanges"} # type: ignore
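    # Hedged usage sketch (illustrative; assumes `client` is a configured
    # StorageManagementClient and the v2022_05_01 models are imported as
    # `models`; `restore_point` is a datetime in the past):
    #
    #   params = models.BlobRestoreParameters(
    #       time_to_restore=restore_point,
    #       blob_ranges=[models.BlobRestoreRange(start_range="", end_range="")],
    #   )
    #   status = client.storage_accounts.begin_restore_blob_ranges(
    #       "my-rg", "mystorageacct", params).result()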
@distributed_trace
def revoke_user_delegation_keys( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, account_name: str, **kwargs: Any
) -> None:
"""Revoke user delegation keys.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive. Required.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only. Required.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-01")) # type: Literal["2022-05-01"]
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_revoke_user_delegation_keys_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.revoke_user_delegation_keys.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
revoke_user_delegation_keys.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/revokeUserDelegationKeys"} # type: ignore
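    # Hedged usage sketch (illustrative):
    #
    #   client.storage_accounts.revoke_user_delegation_keys("my-rg", "mystorageacct")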
| [
"[email protected]"
] | |
a66173adf7118eff4cc81d1df2f6e262385d23c9 | 1b77eaf078321b1320d72aa36a4357568101e4ca | /字典6.5/venv/Scripts/easy_install-script.py | f3f5e0912c303566f77431e585b64cff50c8b803 | [] | no_license | BEE-JN/python_homework | 92ffc1216a380d124901fd64cc541f70813847dc | 8ba4ea79cbd422f40e6f9f1cc5fed4d75715d207 | refs/heads/master | 2020-03-23T08:02:47.863607 | 2018-07-17T15:30:21 | 2018-07-17T15:30:21 | 141,305,118 | 1 | 0 | null | null | null | null | WINDOWS-1252 | Python | false | false | 436 | py | #!E:\python\字典6.5\venv\Scripts\python.exe -x
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==28.8.0','console_scripts','easy_install'
__requires__ = 'setuptools==28.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==28.8.0', 'console_scripts', 'easy_install')()
)
| [
"[email protected]"
] | |
cff524845cd8cd7c51e340615ab03f93dd2e8f56 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2464/60624/263253.py | 43b61c2b54734e6e699622b0d037ab4ce3242c40 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 576 | py | def func8():
target = int(input())
nums = list(map(int, input().split(",")))
low, high, res = 0, len(nums), 0
def helper(size):
sum_size = 0
for i in range(len(nums)):
sum_size += nums[i]
if i >= size:
sum_size -= nums[i-size]
if sum_size >= target:
return True
return False
while low <= high:
mid = (low+high)//2
if helper(mid):
res = mid
high = mid-1
else:
low = mid+1
print(res)
return
func8() | [
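# Worked example (assumed I/O, matching the input format read above):
#   input : 7
#           2,3,1,2,4,3
#   output: 2   (the window [4, 3] is the shortest with sum >= 7)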
"[email protected]"
] | |
aaf0a6ea8f8061f08ac8215d02d2a9afd75d1594 | 544cfadc742536618168fc80a5bd81a35a5f2c99 | /tools/test/connectivity/acts/framework/tests/controllers/ap_lib/radvd_test.py | 452bd655aba4b1be73089b416c1c32a84ab76fff | [] | no_license | ZYHGOD-1/Aosp11 | 0400619993b559bf4380db2da0addfa9cccd698d | 78a61ca023cbf1a0cecfef8b97df2b274ac3a988 | refs/heads/main | 2023-04-21T20:13:54.629813 | 2021-05-22T05:28:21 | 2021-05-22T05:28:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,027 | py | #!/usr/bin/env python3
#
# Copyright 2020 - The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
import unittest
from unittest.mock import patch
from acts.controllers.ap_lib import radvd_constants
from acts.controllers.ap_lib.radvd import Error
from acts.controllers.ap_lib.radvd import Radvd
from acts.controllers.ap_lib.radvd_config import RadvdConfig
RADVD_PREFIX = 'fd00::/64'
SEARCH_FILE = ('acts.controllers.utils_lib.commands.shell.'
'ShellCommand.search_file')
DELETE_FILE = ('acts.controllers.utils_lib.commands.shell.ShellCommand.'
'delete_file')
CORRECT_COMPLEX_RADVD_CONFIG = ("""interface wlan0 {
IgnoreIfMissing on;
AdvSendAdvert off;
UnicastOnly on;
MaxRtrAdvInterval 60;
MinRtrAdvInterval 5;
MinDelayBetweenRAs 5;
AdvManagedFlag off;
AdvOtherConfigFlag on;
AdvLinkMTU 1400;
AdvReachableTime 3600000;
AdvRetransTimer 10;
AdvCurHopLimit 50;
AdvDefaultLifetime 8000;
AdvDefaultPreference off;
AdvSourceLLAddress on;
AdvHomeAgentFlag off;
AdvHomeAgentInfo on;
HomeAgentLifetime 100;
HomeAgentPreference 100;
AdvMobRtrSupportFlag off;
AdvIntervalOpt on;
prefix fd00::/64
{
AdvOnLink off;
AdvAutonomous on;
AdvRouterAddr off;
AdvValidLifetime 86400;
AdvPreferredLifetime 14400;
Base6to4Interface NA;
};
clients
{
fe80::c66d:3c75:2cec:1d72;
fe80::c66d:3c75:2cec:1d73;
};
route fd00::/64 {
AdvRouteLifetime 1024;
AdvRoutePreference high;
};
RDNSS 2401:fa00:480:7a00:4d56:5373:4549:1e29 2401:fa00:480:7a00:4d56:5373:4549:1e30 {
AdvRDNSSPreference 8;
AdvRDNSSOpen on;
AdvRDNSSLifetime 1025;
};
};""".replace(" ", "\t"))
CORRECT_SIMPLE_RADVD_CONFIG = ("""interface wlan0 {
AdvSendAdvert on;
prefix fd00::/64
{
AdvOnLink on;
AdvAutonomous on;
};
};""".replace(" ", "\t"))
def delete_file_mock(file_to_delete):
if os.path.exists(file_to_delete):
os.remove(file_to_delete)
def write_configs_mock(config_file_with_path, output_config):
with open(config_file_with_path, 'w+') as config_fileId:
config_fileId.write(output_config)
class RadvdTest(unittest.TestCase):
@patch('acts.controllers.utils_lib.commands.shell.ShellCommand.kill')
def test_radvd_ikill(self, kill):
kill.return_value = True
radvd_mock = Radvd('mock_runner', 'wlan0')
self.assertIsNone(radvd_mock.stop())
@patch('acts.controllers.utils_lib.commands.shell.ShellCommand.is_alive')
def test_radvd_is_alive_True(self, is_alive_mock):
is_alive_mock.return_value = True
radvd_mock = Radvd('mock_runner', 'wlan0')
self.assertTrue(radvd_mock.is_alive())
@patch('acts.controllers.utils_lib.commands.shell.ShellCommand.is_alive')
def test_radvd_is_alive_False(self, is_alive_mock):
is_alive_mock.return_value = False
radvd_mock = Radvd('mock_runner', 'wlan0')
self.assertFalse(radvd_mock.is_alive())
@patch('acts.controllers.ap_lib.radvd.Radvd._scan_for_errors')
@patch('acts.controllers.ap_lib.radvd.Radvd.is_alive')
def test_wait_for_process_process_alive(self, is_alive_mock,
_scan_for_errors_mock):
is_alive_mock.return_value = True
_scan_for_errors_mock.return_value = True
radvd_mock = Radvd('mock_runner', 'wlan0')
self.assertIsNone(radvd_mock._wait_for_process(timeout=2))
@patch('acts.controllers.ap_lib.radvd.Radvd.is_alive')
@patch(SEARCH_FILE)
def test_scan_for_errors_is_dead(self, search_file_mock, is_alive_mock):
is_alive_mock.return_value = False
search_file_mock.return_value = False
radvd_mock = Radvd('mock_runner', 'wlan0')
with self.assertRaises(Error) as context:
radvd_mock._scan_for_errors(True)
self.assertTrue('Radvd failed to start' in str(context.exception))
@patch('acts.controllers.ap_lib.radvd.Radvd.is_alive')
@patch(SEARCH_FILE)
def test_scan_for_errors_exited_prematurely(self, search_file_mock,
is_alive_mock):
is_alive_mock.return_value = True
search_file_mock.return_value = True
radvd_mock = Radvd('mock_runner', 'wlan0')
with self.assertRaises(Error) as context:
radvd_mock._scan_for_errors(True)
self.assertTrue('Radvd exited prematurely.' in str(context.exception))
@patch('acts.controllers.ap_lib.radvd.Radvd.is_alive')
@patch(SEARCH_FILE)
def test_scan_for_errors_success(self, search_file_mock, is_alive_mock):
is_alive_mock.return_value = True
search_file_mock.return_value = False
radvd_mock = Radvd('mock_runner', 'wlan0')
self.assertIsNone(radvd_mock._scan_for_errors(True))
@patch(DELETE_FILE)
@patch('acts.controllers.utils_lib.commands.shell.ShellCommand.write_file')
def test_write_configs_simple(self, write_file, delete_file):
delete_file.side_effect = delete_file_mock
write_file.side_effect = write_configs_mock
basic_radvd_config = RadvdConfig(
prefix=RADVD_PREFIX,
adv_send_advert=radvd_constants.ADV_SEND_ADVERT_ON,
adv_on_link=radvd_constants.ADV_ON_LINK_ON,
adv_autonomous=radvd_constants.ADV_AUTONOMOUS_ON)
radvd_mock = Radvd('mock_runner', 'wlan0')
radvd_mock._write_configs(basic_radvd_config)
radvd_config = radvd_mock._config_file
with open(radvd_config, 'r') as radvd_config_fileId:
config_data = radvd_config_fileId.read()
self.assertTrue(CORRECT_SIMPLE_RADVD_CONFIG == config_data)
@patch(DELETE_FILE)
@patch('acts.controllers.utils_lib.commands.shell.ShellCommand.write_file')
def test_write_configs_complex(self, write_file, delete_file):
delete_file.side_effect = delete_file_mock
write_file.side_effect = write_configs_mock
complex_radvd_config = RadvdConfig(
prefix=RADVD_PREFIX,
clients=['fe80::c66d:3c75:2cec:1d72', 'fe80::c66d:3c75:2cec:1d73'],
route=RADVD_PREFIX,
rdnss=[
'2401:fa00:480:7a00:4d56:5373:4549:1e29',
'2401:fa00:480:7a00:4d56:5373:4549:1e30',
],
ignore_if_missing=radvd_constants.IGNORE_IF_MISSING_ON,
adv_send_advert=radvd_constants.ADV_SEND_ADVERT_OFF,
unicast_only=radvd_constants.UNICAST_ONLY_ON,
max_rtr_adv_interval=60,
min_rtr_adv_interval=5,
min_delay_between_ras=5,
adv_managed_flag=radvd_constants.ADV_MANAGED_FLAG_OFF,
adv_other_config_flag=radvd_constants.ADV_OTHER_CONFIG_FLAG_ON,
adv_link_mtu=1400,
adv_reachable_time=3600000,
adv_retrans_timer=10,
adv_cur_hop_limit=50,
adv_default_lifetime=8000,
adv_default_preference=radvd_constants.ADV_DEFAULT_PREFERENCE_OFF,
adv_source_ll_address=radvd_constants.ADV_SOURCE_LL_ADDRESS_ON,
adv_home_agent_flag=radvd_constants.ADV_HOME_AGENT_FLAG_OFF,
adv_home_agent_info=radvd_constants.ADV_HOME_AGENT_INFO_ON,
home_agent_lifetime=100,
home_agent_preference=100,
adv_mob_rtr_support_flag=radvd_constants.
ADV_MOB_RTR_SUPPORT_FLAG_OFF,
adv_interval_opt=radvd_constants.ADV_INTERVAL_OPT_ON,
adv_on_link=radvd_constants.ADV_ON_LINK_OFF,
adv_autonomous=radvd_constants.ADV_AUTONOMOUS_ON,
adv_router_addr=radvd_constants.ADV_ROUTER_ADDR_OFF,
adv_valid_lifetime=86400,
adv_preferred_lifetime=14400,
base_6to4_interface='NA',
adv_route_lifetime=1024,
adv_route_preference=radvd_constants.ADV_ROUTE_PREFERENCE_HIGH,
adv_rdnss_preference=8,
adv_rdnss_open=radvd_constants.ADV_RDNSS_OPEN_ON,
adv_rdnss_lifetime=1025)
radvd_mock = Radvd('mock_runner', 'wlan0')
radvd_mock._write_configs(complex_radvd_config)
radvd_config = radvd_mock._config_file
with open(radvd_config, 'r') as radvd_config_fileId:
config_data = radvd_config_fileId.read()
self.assertTrue(CORRECT_COMPLEX_RADVD_CONFIG == config_data)
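# Assumed standard unittest entry point (hedged: typical for ACTS test
# modules, but not shown in this excerpt):
if __name__ == '__main__':
    unittest.main()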
| [
"[email protected]"
] | |
71095ab94c5e4ddec9a1d84902c4dd9e3bef9571 | 8d90e2eae476ecbe88d46ef2f03fe7ba92cc733b | /Programming Basics with Python/For-cycle/For_C_lab_ex6_sum_of_vowels.py | 39e46fed5101d534882c2b874c0d0b2764c03068 | [] | no_license | KaterinaMutafova/SoftUni | c3f8bae3c2bf7bd4038da010ca03edc412672468 | 7aeef6f25c3479a8d677676cb1d66df20ca0d411 | refs/heads/main | 2023-03-08T10:53:49.748153 | 2021-02-19T15:55:13 | 2021-02-19T15:55:13 | 317,597,660 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 276 | py | text = input()
result = 0
for i in text:
if i == "a":
result += 1
elif i == "e":
result += 2
elif i == "i":
result += 3
elif i == "o":
result += 4
elif i == "u":
result += 5
print(result)
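# Equivalent dict-based sketch (illustrative; same behavior):
#   scores = {"a": 1, "e": 2, "i": 3, "o": 4, "u": 5}
#   print(sum(scores.get(ch, 0) for ch in input()))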
| [
"[email protected]"
] | |
67d1221bfdb2b6a345db86fe818fdbf3895b92fb | 1c72aa6d53c886d8fb8ae41a3e9b9c6c4dd9dc6f | /Semester 1/Project submissions/Lee Eldridge/Excercise Weeks 1-9 - Lee Eldridge/Week 7/url_reader.py | e2cc32fffbe071db0b3e936c95c54fccaf8b2641 | [] | no_license | codebubb/python_course | 74761ce3189d67e3aff964c056aeab27d4e94d4a | 4a6ed4a64e6a726d886add8364c65956d5053fc2 | refs/heads/master | 2021-01-11T03:06:50.519208 | 2016-07-29T10:47:12 | 2016-10-17T10:42:29 | 71,114,974 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | import time
import urllib2
open_file = urllib2.urlopen('http://wordpress.org/plugins/about/readme.txt', 'r')
read = open_file.read()
file_list = read.split()
print read
print ""
print "Hello, currently doing some analysis, please wait..."
time.sleep(3)
print "There are currently:", len(file_list), "words in the above text"
print "There are currently:", len(set(file_list)), "unique words in the above text"
count = 0
for e in file_list:
count = len(e) + count
print "There are currently:", count, "letters in the above text."
| [
"[email protected]"
] | |
2ae47ede9827fbde591f754ac58ffc0dc2fac0d9 | 1ac99f8065a2646bdb8ea9003fd5930341fb0cf4 | /Exam2/3.py | 096cd041d3b9738fdbd0949fc47c080513e7b1e3 | [] | no_license | krishnanunni-pr/Pyrhon-Django | 894547f3d4d22dce3fff14e88815122c12c145b5 | c59471f947ceb103bb27a19e8a2a160e8ada529b | refs/heads/master | 2023-07-29T19:40:38.199104 | 2021-09-09T18:12:24 | 2021-09-09T18:12:24 | 385,128,009 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 648 | py | # 3. Create a Book class with instance Library_name, book_name, author, pages?
class Book:
def bookdetails(self,bkname,author,pages,library_name):
self.library_name=library_name
self.bkname=bkname
self.author=author
self.pages=pages
print("Book name :",bkname)
print("Authour :",author)
print("Number of pages :",pages)
print("Library section :",library_name)
obj=Book()
bname=input("Enter the name of book :")
author=input("Name of authour :")
pageno=int(input("Number of pages :"))
library_name=input("Enter library name :")
obj.bookdetails(bname,author,pageno,library_name)
| [
"[email protected]"
] | |
3b83024a4da9cdde39c50333316b03838417d3a9 | eac55c1fbbf83f08eabdfd5337ae54ca24ed655b | /build/velodyne/velodyne_msgs/catkin_generated/pkg.develspace.context.pc.py | 85c1b12f52780f9af793f52565600c24d7496253 | [] | no_license | codeJRV/velodyne_ws | c4271d81de66ee354f9c948aa961f56266e74e55 | ae684357b2d3f0ddc8a327cd9f625f727d02e145 | refs/heads/master | 2021-01-25T14:03:39.322967 | 2018-03-15T23:16:36 | 2018-03-15T23:16:36 | 123,643,268 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 522 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/jrv/Research/Velodyne/velodyne_ws/devel/include".split(';') if "/home/jrv/Research/Velodyne/velodyne_ws/devel/include" != "" else []
PROJECT_CATKIN_DEPENDS = "message_runtime;std_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "velodyne_msgs"
PROJECT_SPACE_DIR = "/home/jrv/Research/Velodyne/velodyne_ws/devel"
PROJECT_VERSION = "1.3.0"
| [
"[email protected]"
] | |
1598000563214e90bffa55f937fbfb0a4165c9b2 | 4d058cd2a41712cf17bbd5ef5f5db58bb0134220 | /jug/jug_version.py | 957827d8d99ed85b0c58fff4a7e5af6e790a46fb | [
"MIT"
] | permissive | hamiltont/jug | 1d0c14a62328e3a053919e2dfa06aff9ec70105e | e89668387cf3cca9df700fccb6913a1fd3b5f424 | refs/heads/master | 2021-01-18T12:11:09.882447 | 2013-03-25T20:42:17 | 2013-03-25T20:42:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26 | py | __version__ = '0.9.3+git'
| [
"[email protected]"
] | |
c7f1fa58dbac7f6e81831d6d8a3c59cdc2507686 | 3899dd3debab668ef0c4b91c12127e714bdf3d6d | /venv/Lib/site-packages/tensorflow/python/grappler/cluster.py | 9f3a130798ca0f57cbdcd49506e0c1d6fe7033db | [] | no_license | SphericalPotatoInVacuum/CNNDDDD | b2f79521581a15d522d8bb52f81b731a3c6a4db4 | 03c5c0e7cb922f53f31025b7dd78287a19392824 | refs/heads/master | 2020-04-21T16:10:25.909319 | 2019-02-08T06:04:42 | 2019-02-08T06:04:42 | 169,691,960 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,139 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A python interface for Grappler clusters."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
from tensorflow.core.framework import step_stats_pb2
from tensorflow.core.grappler.costs import op_performance_data_pb2
from tensorflow.core.protobuf import device_properties_pb2
from tensorflow.python import pywrap_tensorflow as tf_cluster
from tensorflow.python.framework import errors
class Cluster(object):
"""Grappler Clusters."""
def __init__(self,
allow_soft_placement=True,
disable_detailed_stats=True,
disable_timeline=True,
devices=None):
"""Creates a Cluster.
Args:
allow_soft_placement: If True, TF will automatically fix illegal
placements instead of erroring out if the placement isn't legal.
disable_detailed_stats: If True, detailed statistics will not be
available.
disable_timeline: If True, the timeline information will not be reported.
devices: A list of devices of type device_properties_pb2.NamedDevice.
If None, a device list will be created based on the spec of
the local machine.
"""
self._tf_cluster = None
self._generate_timeline = not disable_timeline
with errors.raise_exception_on_not_ok_status() as status:
if devices is None:
self._tf_cluster = tf_cluster.TF_NewCluster(
allow_soft_placement, disable_detailed_stats, status)
else:
devices_serialized = [device.SerializeToString() for device in devices]
self._tf_cluster = tf_cluster.TF_NewVirtualCluster(
devices_serialized, status)
def Shutdown(self):
if self._tf_cluster is not None:
tf_cluster.TF_ShutdownCluster(self._tf_cluster)
self._tf_cluster = None
def __del__(self):
self.Shutdown()
@property
def tf_cluster(self):
return self._tf_cluster
def ListDevices(self):
"""Returns the list of available hardware devices."""
devices = []
if self._tf_cluster is not None:
ret_from_swig = tf_cluster.TF_ListDevices(self._tf_cluster)
devices = []
for raw_dev in ret_from_swig:
devices.append(device_properties_pb2.NamedDevice.FromString(raw_dev))
return devices
def ListAvailableOps(self):
"""Returns a list of all the available operations (sorted alphatically)."""
return tf_cluster.TF_ListAvailableOps()
def GetSupportedDevices(self, item):
return tf_cluster.TF_GetSupportedDevices(self._tf_cluster, item.tf_item)
def EstimatePerformance(self, device):
"""Estimate the performance of the specified device."""
serialized = device.SerializeToString()
return tf_cluster.TF_EstimatePerformance(serialized)
def MeasureCosts(self, item):
"""Returns the cost of running the specified item.
Args:
item: The item for which to measure the costs.
Returns: The triplet op_perfs, runtime, step_stats.
"""
with errors.raise_exception_on_not_ok_status() as status:
ret_from_swig = tf_cluster.TF_MeasureCosts(
item.tf_item, self._tf_cluster, self._generate_timeline, status)
if ret_from_swig is None:
return None
op_perf_bytes_list, run_time, step_stats_bytes = ret_from_swig
op_perfs = []
for op_perf_bytes in op_perf_bytes_list:
op_perfs.append(
op_performance_data_pb2.OpPerformance.FromString(op_perf_bytes))
return (op_perfs, run_time,
step_stats_pb2.StepStats.FromString(step_stats_bytes))
def DeterminePeakMemoryUsage(self, item):
"""Returns a snapshot of the peak memory usage.
Args:
item: The item for which to measure the costs.
Returns: A hashtable indexed by device name.
"""
with errors.raise_exception_on_not_ok_status() as status:
ret_from_swig = tf_cluster.TF_DeterminePeakMemoryUsage(
item.tf_item, self._tf_cluster, status)
return ret_from_swig
@contextlib.contextmanager
def Provision(allow_soft_placement=True,
disable_detailed_stats=True,
disable_timeline=True,
devices=None):
cluster = Cluster(allow_soft_placement, disable_detailed_stats,
disable_timeline, devices)
yield cluster
cluster.Shutdown()
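# Hedged usage sketch (illustrative; assumes `item` was built with
# tensorflow.python.grappler.item for the graph under test):
#
#   with Provision(disable_detailed_stats=False) as cluster:
#     op_perfs, run_time, step_stats = cluster.MeasureCosts(item)
#     print(run_time)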
| [
"[email protected]"
] | |
cc2a32a439a92c92aa61ba4ea571b75e901de399 | 77311ad9622a7d8b88707d7cee3f44de7c8860cb | /res/scripts/client/gui/scaleform/daapi/view/meta/fortdatepickerpopovermeta.py | 206f951b446d8c248b6507b491083faefdebba91 | [] | no_license | webiumsk/WOT-0.9.14-CT | 9b193191505a4560df4e872e022eebf59308057e | cfe0b03e511d02c36ce185f308eb48f13ecc05ca | refs/heads/master | 2021-01-10T02:14:10.830715 | 2016-02-14T11:59:59 | 2016-02-14T11:59:59 | 51,606,676 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 546 | py | # 2016.02.14 12:40:19 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/meta/FortDatePickerPopoverMeta.py
from gui.Scaleform.daapi.view.lobby.popover.SmartPopOverView import SmartPopOverView
class FortDatePickerPopoverMeta(SmartPopOverView):
pass
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\gui\scaleform\daapi\view\meta\fortdatepickerpopovermeta.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.02.14 12:40:19 Central Europe (standard time)
| [
"[email protected]"
] | |
fee57e6f0f3362697e4497bd4b3b54038a39cbcd | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnmagil.py | 48c358844545a847196514438a8bcbd919827695 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 43 | py | ii = [('DaltJMA.py', 1), ('MartHRW.py', 1)] | [
"[email protected]"
] | |
99e6ffcd8ba252b61f9439f0d8d7c6ec9c63d164 | 8d6f9a3d65a189d99eff10e30cfabb0b761b635f | /mystic/abstract_solver.py | 4af6b771bf30b07d254c1ee0b0e4f5432ba6772d | [
"BSD-3-Clause"
] | permissive | arita37/mystic | db2ebbed139b163e3e5df49c2325b3de35dd8cd0 | 3dcdd4627eb759672091859e8334be075bfd25a5 | refs/heads/master | 2021-01-22T20:19:22.569893 | 2016-08-20T15:52:46 | 2016-08-20T15:52:46 | 66,545,670 | 1 | 0 | null | 2016-08-25T09:42:31 | 2016-08-25T09:42:31 | null | UTF-8 | Python | false | false | 38,792 | py | #!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2016 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/mystic/browser/mystic/LICENSE
#
## Abstract Solver Class
# derived from Patrick Hung's original DifferentialEvolutionSolver
"""
This module contains the base class for mystic solvers, and describes
the mystic solver interface. The "Solve" method must be overwritten
with the derived solver's optimization algorithm. In many cases, a
minimal function call interface for a derived solver is provided
along with the derived class. See `mystic.scipy_optimize`, and the
following for an example.
Usage
=====
A typical call to a mystic solver will roughly follow this example:
>>> # the function to be minimized and the initial values
>>> from mystic.models import rosen
>>> x0 = [0.8, 1.2, 0.7]
>>>
>>> # get monitors and termination condition objects
>>> from mystic.monitors import Monitor
>>> stepmon = Monitor()
>>> evalmon = Monitor()
>>> from mystic.termination import CandidateRelativeTolerance as CRT
>>>
>>> # instantiate and configure the solver
>>> from mystic.solvers import NelderMeadSimplexSolver
>>> solver = NelderMeadSimplexSolver(len(x0))
>>> solver.SetInitialPoints(x0)
>>> solver.SetEvaluationMonitor(evalmon)
>>> solver.SetGenerationMonitor(stepmon)
>>> solver.enable_signal_handler()
>>> solver.SetTermination(CRT())
>>> solver.Solve(rosen)
>>>
>>> # obtain the solution
>>> solution = solver.Solution()
An equivalent, yet less flexible, call using the minimal interface is:
>>> # the function to be minimized and the initial values
>>> from mystic.models import rosen
>>> x0 = [0.8, 1.2, 0.7]
>>>
>>> # configure the solver and obtain the solution
>>> from mystic.solvers import fmin
>>> solution = fmin(rosen,x0)
Handler
=======
All solvers packaged with mystic include a signal handler that
provides the following options::
sol: Print current best solution.
cont: Continue calculation.
call: Executes sigint_callback, if provided.
exit: Exits with current best solution.
Handlers are enabled with the 'enable_signal_handler' method,
and are configured through the solver's 'Solve' method. Handlers
trigger when a signal interrupt (usually, Ctrl-C) is given while
the solver is running.
"""
__all__ = ['AbstractSolver']
import random
import numpy
from numpy import inf, shape, asarray, absolute, asfarray, seterr
from mystic.tools import wrap_function, wrap_nested, wrap_reducer
from mystic.tools import wrap_bounds, wrap_penalty, reduced
from klepto import isvalid, validate
abs = absolute
null = lambda x: None
class AbstractSolver(object):
"""
AbstractSolver base class for mystic optimizers.
"""
def __init__(self, dim, **kwds):
"""
Takes one initial input:
dim -- dimensionality of the problem.
Additional inputs:
npop -- size of the trial solution population. [default = 1]
Important class members:
nDim, nPop = dim, npop
generations - an iteration counter.
evaluations - an evaluation counter.
bestEnergy - current best energy.
bestSolution - current best parameter set. [size = dim]
popEnergy - set of all trial energy solutions. [size = npop]
population - set of all trial parameter solutions. [size = dim*npop]
solution_history - history of bestSolution status. [StepMonitor.x]
energy_history - history of bestEnergy status. [StepMonitor.y]
signal_handler - catches the interrupt signal.
"""
NP = kwds['npop'] if 'npop' in kwds else 1
self.nDim = dim
self.nPop = NP
self._init_popEnergy = inf
self.popEnergy = [self._init_popEnergy] * NP
self.population = [[0.0 for i in range(dim)] for j in range(NP)]
self.trialSolution = [0.0] * dim
self._map_solver = False
self._bestEnergy = None
self._bestSolution = None
self._state = None
self._type = self.__class__.__name__
self.signal_handler = None
self._handle_sigint = False
self._useStrictRange = False
self._defaultMin = [-1e3] * dim
self._defaultMax = [ 1e3] * dim
self._strictMin = []
self._strictMax = []
self._maxiter = None
self._maxfun = None
self._saveiter = None
#self._saveeval = None
from mystic.monitors import Null, Monitor
self._evalmon = Null()
self._stepmon = Monitor()
self._fcalls = [0]
self._energy_history = None
self._solution_history= None
self.id = None # identifier (use like "rank" for MPI)
self._constraints = lambda x: x
self._penalty = lambda x: 0.0
self._reducer = None
self._cost = (None, None, None)
# (cost, raw_cost, args) #,callback)
self._collapse = False
self._termination = lambda x, *ar, **kw: False if len(ar) < 1 or ar[0] is False or (kw['info'] if 'info' in kw else True) == False else '' #XXX: better default ?
# (get termination details with self._termination.__doc__)
import mystic.termination as mt
self._EARLYEXIT = mt.EARLYEXIT
self._live = False
return
def Solution(self):
"""return the best solution"""
return self.bestSolution
def __evaluations(self):
"""get the number of function calls"""
return self._fcalls[0]
def __generations(self):
"""get the number of iterations"""
return max(0,len(self._stepmon)-1)
def __energy_history(self):
"""get the energy_history (default: energy_history = _stepmon._y)"""
if self._energy_history is None: return self._stepmon._y
return self._energy_history
def __set_energy_history(self, energy):
"""set the energy_history (energy=None will sync with _stepmon._y)"""
self._energy_history = energy
return
def __solution_history(self):
"""get the solution_history (default: solution_history = _stepmon.x)"""
if self._solution_history is None: return self._stepmon.x
return self._solution_history
def __set_solution_history(self, params):
"""set the solution_history (params=None will sync with _stepmon.x)"""
self._solution_history = params
return
def __bestSolution(self):
"""get the bestSolution (default: bestSolution = population[0])"""
if self._bestSolution is None: return self.population[0]
return self._bestSolution
def __set_bestSolution(self, params):
"""set the bestSolution (params=None will sync with population[0])"""
self._bestSolution = params
return
def __bestEnergy(self):
"""get the bestEnergy (default: bestEnergy = popEnergy[0])"""
if self._bestEnergy is None: return self.popEnergy[0]
return self._bestEnergy
def __set_bestEnergy(self, energy):
"""set the bestEnergy (energy=None will sync with popEnergy[0])"""
self._bestEnergy = energy
return
def SetReducer(self, reducer, arraylike=False):
"""apply a reducer function to the cost function
input::
- a reducer function of the form: y' = reducer(yk), where yk is a results
vector and y' is a single value. Ideally, this method is applied to
a cost function with a multi-value return, to reduce the output to a
single value. If arraylike, the reducer provided should take a single
array as input and produce a scalar; otherwise, the reducer provided
           should meet the requirements of Python's builtin 'reduce' method
(e.g. lambda x,y: x+y), taking two scalars and producing a scalar."""
if not reducer:
self._reducer = None
elif not callable(reducer):
raise TypeError, "'%s' is not a callable function" % reducer
elif not arraylike:
self._reducer = wrap_reducer(reducer)
else: #XXX: check if is arraylike?
self._reducer = reducer
return self._update_objective()
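    # A minimal illustration of the two reducer styles accepted above (the
    # calls are hypothetical, on some solver instance):
    #
    #   solver.SetReducer(lambda x, y: x + y)       # pairwise, reduce()-style
    #   solver.SetReducer(sum, arraylike=True)      # whole-array reducer
    #
    # Either form collapses a multi-valued cost into the single scalar that
    # the optimizer minimizes.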
def SetPenalty(self, penalty):
"""apply a penalty function to the optimization
input::
- a penalty function of the form: y' = penalty(xk), with y = cost(xk) + y',
where xk is the current parameter vector. Ideally, this function
is constructed so a penalty is applied when the desired (i.e. encoded)
constraints are violated. Equality constraints should be considered
satisfied when the penalty condition evaluates to zero, while
inequality constraints are satisfied when the penalty condition
evaluates to a non-positive number."""
if not penalty:
self._penalty = lambda x: 0.0
elif not callable(penalty):
raise TypeError, "'%s' is not a callable function" % penalty
else: #XXX: check for format: y' = penalty(x) ?
self._penalty = penalty
return self._update_objective()
def SetConstraints(self, constraints):
"""apply a constraints function to the optimization
input::
- a constraints function of the form: xk' = constraints(xk),
where xk is the current parameter vector. Ideally, this function
is constructed so the parameter vector it passes to the cost function
will satisfy the desired (i.e. encoded) constraints."""
if not constraints:
self._constraints = lambda x: x
elif not callable(constraints):
raise TypeError, "'%s' is not a callable function" % constraints
else: #XXX: check for format: x' = constraints(x) ?
self._constraints = constraints
return self._update_objective()
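    # Sketch of a typical constraints function (illustrative, not part of the
    # class): it must map a parameter vector to a parameter vector, here by
    # forcing non-negativity before the cost function sees the parameters.
    #
    #   def nonnegative(x):
    #       return [max(0.0, xi) for xi in x]
    #   solver.SetConstraints(nonnegative)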
def SetGenerationMonitor(self, monitor, new=False):
"""select a callable to monitor (x, f(x)) after each solver iteration"""
from mystic.monitors import Null, Monitor#, CustomMonitor
current = Null() if new else self._stepmon
if isinstance(monitor, Monitor): # is Monitor()
self._stepmon = monitor
self._stepmon.prepend(current)
elif isinstance(monitor, Null) or monitor == Null: # is Null() or Null
self._stepmon = Monitor() #XXX: don't allow Null
self._stepmon.prepend(current)
elif hasattr(monitor, '__module__'): # is CustomMonitor()
if monitor.__module__ in ['mystic._genSow']:
self._stepmon = monitor #FIXME: need .prepend(current)
else:
raise TypeError, "'%s' is not a monitor instance" % monitor
self.energy_history = None # sync with self._stepmon
self.solution_history = None # sync with self._stepmon
return
def SetEvaluationMonitor(self, monitor, new=False):
"""select a callable to monitor (x, f(x)) after each cost function evaluation"""
from mystic.monitors import Null, Monitor#, CustomMonitor
current = Null() if new else self._evalmon
if isinstance(monitor, (Null, Monitor) ): # is Monitor() or Null()
self._evalmon = monitor
self._evalmon.prepend(current)
elif monitor == Null: # is Null
self._evalmon = monitor()
self._evalmon.prepend(current)
elif hasattr(monitor, '__module__'): # is CustomMonitor()
if monitor.__module__ in ['mystic._genSow']:
self._evalmon = monitor #FIXME: need .prepend(current)
else:
raise TypeError, "'%s' is not a monitor instance" % monitor
return
def SetStrictRanges(self, min=None, max=None):
"""ensure solution is within bounds
input::
- min, max: must be a sequence of length self.nDim
- each min[i] should be <= the corresponding max[i]
note::
        SetStrictRanges(False) will remove strict range constraints"""
if min is False or max is False:
self._useStrictRange = False
return self._update_objective()
#XXX: better to use 'defaultMin,defaultMax' or '-inf,inf' ???
if min is None: min = self._defaultMin
if max is None: max = self._defaultMax
# when 'some' of the bounds are given as 'None', replace with default
for i in range(len(min)):
if min[i] is None: min[i] = self._defaultMin[0]
if max[i] is None: max[i] = self._defaultMax[0]
min = asarray(min); max = asarray(max)
if numpy.any(( min > max ),0):
raise ValueError, "each min[i] must be <= the corresponding max[i]"
if len(min) != self.nDim:
raise ValueError, "bounds array must be length %s" % self.nDim
self._useStrictRange = True
self._strictMin = min
self._strictMax = max
return self._update_objective()
def _clipGuessWithinRangeBoundary(self, x0, at=True):
"""ensure that initial guess is set within bounds
input::
- x0: must be a sequence of length self.nDim"""
#if len(x0) != self.nDim: #XXX: unnecessary w/ self.trialSolution
# raise ValueError, "initial guess must be length %s" % self.nDim
x0 = asarray(x0)
bounds = (self._strictMin,self._strictMax)
if not len(self._strictMin): return x0
# clip x0 at bounds
settings = numpy.seterr(all='ignore')
x_ = x0.clip(*bounds)
numpy.seterr(**settings)
if at: return x_
# clip x0 within bounds
x_ = x_ != x0
x0[x_] = random.uniform(self._strictMin,self._strictMax)[x_]
return x0
def SetInitialPoints(self, x0, radius=0.05):
"""Set Initial Points with Guess (x0)
input::
- x0: must be a sequence of length self.nDim
- radius: generate random points within [-radius*x0, radius*x0]
          for i!=0 when a simplex-type initial guess is required"""
x0 = asfarray(x0)
rank = len(x0.shape)
        if rank == 0:
x0 = asfarray([x0])
rank = 1
if not -1 < rank < 2:
raise ValueError, "Initial guess must be a scalar or rank-1 sequence."
if len(x0) != self.nDim:
raise ValueError, "Initial guess must be length %s" % self.nDim
#slightly alter initial values for solvers that depend on randomness
min = x0*(1-radius)
max = x0*(1+radius)
numzeros = len(x0[x0==0])
min[min==0] = asarray([-radius for i in range(numzeros)])
max[max==0] = asarray([radius for i in range(numzeros)])
self.SetRandomInitialPoints(min,max)
#stick initial values in population[i], i=0
self.population[0] = x0.tolist()
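    # For example (hypothetical values), SetInitialPoints([1.0, 2.0], radius=0.05)
    # stores [1.0, 2.0] as population[0] and draws the remaining members
    # uniformly from [0.95, 1.05] x [1.9, 2.1]; entries equal to zero fall
    # back to the interval [-radius, radius].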
def SetRandomInitialPoints(self, min=None, max=None):
"""Generate Random Initial Points within given Bounds
input::
- min, max: must be a sequence of length self.nDim
- each min[i] should be <= the corresponding max[i]"""
if min is None: min = self._defaultMin
if max is None: max = self._defaultMax
#if numpy.any(( asarray(min) > asarray(max) ),0):
# raise ValueError, "each min[i] must be <= the corresponding max[i]"
if len(min) != self.nDim or len(max) != self.nDim:
raise ValueError, "bounds array must be length %s" % self.nDim
# when 'some' of the bounds are given as 'None', replace with default
for i in range(len(min)):
if min[i] is None: min[i] = self._defaultMin[0]
if max[i] is None: max[i] = self._defaultMax[0]
#generate random initial values
for i in range(len(self.population)):
for j in range(self.nDim):
self.population[i][j] = random.uniform(min[j],max[j])
def SetMultinormalInitialPoints(self, mean, var=None):
"""Generate Initial Points from Multivariate Normal.
input::
- mean must be a sequence of length self.nDim
- var can be...
None: -> it becomes the identity
scalar: -> var becomes scalar * I
matrix: -> the variance matrix. must be the right size!
"""
from mystic.tools import random_state
rng = random_state(module='numpy.random')
assert(len(mean) == self.nDim)
if var is None:
var = numpy.eye(self.nDim)
else:
try: # scalar ?
float(var)
except: # nope. var better be matrix of the right size (no check)
pass
else:
var = var * numpy.eye(self.nDim)
for i in range(len(self.population)):
self.population[i] = rng.multivariate_normal(mean, var).tolist()
return
def SetSampledInitialPoints(self, dist=None):
"""Generate Random Initial Points from Distribution (dist)
input::
- dist: a mystic.math.Distribution instance
"""
from mystic.math import Distribution
if dist is None:
dist = Distribution()
        elif not isinstance(dist, Distribution):
dist = Distribution(dist) #XXX: or throw error?
for i in range(self.nPop):
self.population[i] = dist(self.nDim)
return
def enable_signal_handler(self):#, callback='*'):
"""enable workflow interrupt handler while solver is running"""
""" #XXX: disabled, as would add state to solver
input::
- if a callback function is provided, generate a new handler with
the given callback. If callback is None, do not use a callback.
If callback is not provided, just turn on the existing handler.
"""
## always _generate handler on first call
#if (self.signal_handler is None) and callback == '*':
# callback = None
## when a new callback is given, generate a new handler
#if callback != '*':
# self._generateHandler(callback)
self._handle_sigint = True
def disable_signal_handler(self):
"""disable workflow interrupt handler while solver is running"""
self._handle_sigint = False
def _generateHandler(self,sigint_callback):
"""factory to generate signal handler
Available switches::
- sol --> Print current best solution.
- cont --> Continue calculation.
- call --> Executes sigint_callback, if provided.
- exit --> Exits with current best solution.
"""
def handler(signum, frame):
import inspect
print inspect.getframeinfo(frame)
print inspect.trace()
while 1:
s = raw_input(\
"""
Enter sense switch.
sol: Print current best solution.
cont: Continue calculation.
call: Executes sigint_callback [%s].
exit: Exits with current best solution.
>>> """ % sigint_callback)
if s.lower() == 'sol':
print self.bestSolution
elif s.lower() == 'cont':
return
elif s.lower() == 'call':
# sigint call_back
if sigint_callback is not None:
sigint_callback(self.bestSolution)
elif s.lower() == 'exit':
self._EARLYEXIT = True
return
else:
print "unknown option : %s" % s
return
self.signal_handler = handler
return
def SetSaveFrequency(self, generations=None, filename=None, **kwds):
"""set frequency for saving solver restart file
input::
- generations = number of solver iterations before next save of state
- filename = name of file in which to save solver state
note::
SetSaveFrequency(None) will disable saving solver restart file"""
self._saveiter = generations
#self._saveeval = evaluations
self._state = filename
return
def SetEvaluationLimits(self, generations=None, evaluations=None, \
new=False, **kwds):
"""set limits for generations and/or evaluations
input::
- generations = maximum number of solver iterations (i.e. steps)
- evaluations = maximum number of function evaluations"""
# backward compatibility
self._maxiter = kwds['maxiter'] if 'maxiter' in kwds else generations
self._maxfun = kwds['maxfun'] if 'maxfun' in kwds else evaluations
# handle if new (reset counter, instead of extend counter)
if new:
if generations is not None:
self._maxiter += self.generations
else:
self._maxiter = "*" #XXX: better as self._newmax = True ?
if evaluations is not None:
self._maxfun += self.evaluations
else:
self._maxfun = "*"
return
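    # Usage sketch (hypothetical numbers): for a solver already at
    # generation 50,
    #
    #   solver.SetEvaluationLimits(generations=100)            # stop at 100 total
    #   solver.SetEvaluationLimits(generations=100, new=True)  # allow 100 more (150 total)
    #
    # Passing new=True without an explicit limit stores the "*" sentinel so
    # _SetEvaluationLimits extends the default budget from the current counters.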
def _SetEvaluationLimits(self, iterscale=None, evalscale=None):
"""set the evaluation limits"""
if iterscale is None: iterscale = 10
if evalscale is None: evalscale = 1000
N = len(self.population[0]) # usually self.nDim
# if SetEvaluationLimits not applied, use the solver default
if self._maxiter is None:
self._maxiter = N * self.nPop * iterscale
elif self._maxiter == "*": # (i.e. None, but 'reset counter')
self._maxiter = (N * self.nPop * iterscale) + self.generations
if self._maxfun is None:
self._maxfun = N * self.nPop * evalscale
        elif self._maxfun == "*":
self._maxfun = (N * self.nPop * evalscale) + self.evaluations
return
def Terminated(self, disp=False, info=False, termination=None):
"""check if the solver meets the given termination conditions
Input::
- disp = if True, print termination statistics and/or warnings
- info = if True, return termination message (instead of boolean)
- termination = termination conditions to check against
Note::
If no termination conditions are given, the solver's stored
termination conditions will be used.
"""
if termination is None:
termination = self._termination
# ensure evaluation limits have been imposed
self._SetEvaluationLimits()
# check for termination messages
msg = termination(self, info=True)
sig = "SolverInterrupt with %s" % {}
lim = "EvaluationLimits with %s" % {'evaluations':self._maxfun,
'generations':self._maxiter}
# push solver internals to scipy.optimize.fmin interface
        if self._maxfun is not None and self._fcalls[0] >= self._maxfun:
            msg = lim #XXX: prefer the default stop ?
            if disp:
                print "Warning: Maximum number of function evaluations has "\
                      "been exceeded."
        elif self._maxiter is not None and self.generations >= self._maxiter:
            msg = lim #XXX: prefer the default stop ?
            if disp:
                print "Warning: Maximum number of iterations has been exceeded."
elif self._EARLYEXIT:
msg = sig
if disp:
print "Warning: Optimization terminated with signal interrupt."
elif msg and disp:
print "Optimization terminated successfully."
print " Current function value: %f" % self.bestEnergy
print " Iterations: %d" % self.generations
print " Function evaluations: %d" % self._fcalls[0]
if info:
return msg
return bool(msg)
def SetTermination(self, termination): # disp ?
"""set the termination conditions"""
#XXX: validate that termination is a 'condition' ?
self._termination = termination
self._collapse = False
if termination is not None:
from mystic.termination import state
self._collapse = any(key.startswith('Collapse') for key in state(termination).iterkeys())
return
def SetObjective(self, cost, ExtraArgs=None): # callback=None/False ?
"""decorate the cost function with bounds, penalties, monitors, etc"""
_cost,_raw,_args = self._cost
# check if need to 'wrap' or can return the stored cost
if (cost is None or cost is _raw or cost is _cost) and \
(ExtraArgs is None or ExtraArgs is _args):
return
# get cost and args if None was given
if cost is None: cost = _raw
args = _args if ExtraArgs is None else ExtraArgs
args = () if args is None else args
# quick validation check (so doesn't screw up internals)
if not isvalid(cost, [0]*self.nDim, *args):
try: name = cost.__name__
except AttributeError: # raise new error for non-callables
cost(*args)
validate(cost, None, *args)
#val = len(args) + 1 #XXX: 'klepto.validate' for better error?
#msg = '%s() invalid number of arguments (%d given)' % (name, val)
#raise TypeError(msg)
# hold on to the 'raw' cost function
self._cost = (None, cost, ExtraArgs)
self._live = False
return
def Collapsed(self, disp=False, info=False):
"""check if the solver meets the given collapse conditions
Input::
- disp = if True, print details about the solver state at collapse
- info = if True, return collapsed state (instead of boolean)
"""
stop = getattr(self, '__stop__', self.Terminated(info=True))
import mystic.collapse as ct
collapses = ct.collapsed(stop) or dict()
if collapses and disp:
for (k,v) in collapses.iteritems():
print " %s: %s" % (k.split()[0],v)
#print "# Collapse at: Generation", self._stepmon._step-1, \
# "with", self.bestEnergy, "@\n#", list(self.bestSolution)
return collapses if info else bool(collapses)
def Collapse(self, disp=False):
"""if solver has terminated by collapse, apply the collapse"""
collapses = self.Collapsed(disp=disp, info=True)
if collapses: # then stomach a bunch of module imports (yuck)
import mystic.tools as to
import mystic.termination as mt
import mystic.constraints as cn
import mystic.mask as ma
# get collapse conditions #XXX: efficient? 4x loops over collapses
state = mt.state(self._termination)
npts = getattr(self._stepmon, '_npts', None) #XXX: default?
conditions = [cn.impose_at(*to.select_params(self,collapses[k])) if state[k].get('target') is None else cn.impose_at(collapses[k],state[k].get('target')) for k in collapses if k.startswith('CollapseAt')]
conditions += [cn.impose_as(collapses[k],state[k].get('offset')) for k in collapses if k.startswith('CollapseAs')]
# get measure collapse conditions
if npts: #XXX: faster/better if comes first or last?
conditions += [cn.impose_measure( npts, [collapses[k] for k in collapses if k.startswith('CollapsePosition')], [collapses[k] for k in collapses if k.startswith('CollapseWeight')] )]
# update termination and constraints in solver
constraints = to.chain(*conditions)(self._constraints)
termination = ma.update_mask(self._termination, collapses)
self.SetConstraints(constraints)
self.SetTermination(termination)
#print mt.state(self._termination).keys()
return collapses
def _update_objective(self):
"""decorate the cost function with bounds, penalties, monitors, etc"""
# rewrap the cost if the solver has been run
if False: # trigger immediately
self._decorate_objective(*self._cost[1:])
else: # delay update until _bootstrap
self.Finalize()
return
def _decorate_objective(self, cost, ExtraArgs=None):
"""decorate the cost function with bounds, penalties, monitors, etc"""
#print ("@", cost, ExtraArgs, max)
raw = cost
if ExtraArgs is None: ExtraArgs = ()
self._fcalls, cost = wrap_function(cost, ExtraArgs, self._evalmon)
if self._useStrictRange:
indx = list(self.popEnergy).index(self.bestEnergy)
ngen = self.generations #XXX: no random if generations=0 ?
for i in range(self.nPop):
                self.population[i] = self._clipGuessWithinRangeBoundary(self.population[i], (not ngen) or (i == indx))
cost = wrap_bounds(cost, self._strictMin, self._strictMax)
cost = wrap_penalty(cost, self._penalty)
cost = wrap_nested(cost, self._constraints)
if self._reducer:
#cost = reduced(*self._reducer)(cost) # was self._reducer = (f,bool)
cost = reduced(self._reducer, arraylike=True)(cost)
# hold on to the 'wrapped' and 'raw' cost function
self._cost = (cost, raw, ExtraArgs)
self._live = True
return cost
def _bootstrap_objective(self, cost=None, ExtraArgs=None):
"""HACK to enable not explicitly calling _decorate_objective"""
_cost,_raw,_args = self._cost
# check if need to 'wrap' or can return the stored cost
if (cost is None or cost is _raw or cost is _cost) and \
(ExtraArgs is None or ExtraArgs is _args) and self._live:
return _cost
# 'wrap' the 'new' cost function with _decorate
self.SetObjective(cost, ExtraArgs)
return self._decorate_objective(*self._cost[1:])
#XXX: when _decorate called, solver._fcalls will be reset ?
def _Step(self, cost=None, ExtraArgs=None, **kwds):
"""perform a single optimization iteration
*** this method must be overwritten ***"""
raise NotImplementedError, "an optimization algorithm was not provided"
def SaveSolver(self, filename=None, **kwds):
"""save solver state to a restart file"""
import dill
fd = None
if filename is None: # then check if already has registered file
if self._state is None: # then create a new one
import os, tempfile
fd, self._state = tempfile.mkstemp(suffix='.pkl')
os.close(fd)
filename = self._state
self._state = filename
        f = open(filename, 'wb')
try:
dill.dump(self, f, **kwds)
self._stepmon.info('DUMPED("%s")' % filename) #XXX: before / after ?
finally:
f.close()
return
def __save_state(self, force=False):
"""save the solver state, if chosen save frequency is met"""
# save the last iteration
if force and bool(self._state):
self.SaveSolver()
return
# save the zeroth iteration
nonzero = True #XXX: or bool(self.generations) ?
# after _saveiter generations, then save state
iters = self._saveiter
saveiter = bool(iters) and not bool(self.generations % iters)
if nonzero and saveiter:
self.SaveSolver()
#FIXME: if _saveeval (or more) since last check, then save state
#save = self.evaluations % self._saveeval
return
def __load_state(self, solver, **kwds):
"""load solver.__dict__ into self.__dict__; override with kwds"""
#XXX: should do some filtering on kwds ?
self.__dict__.update(solver.__dict__, **kwds)
return
def Finalize(self, **kwds):
"""cleanup upon exiting the main optimization loop"""
self._live = False
return
def _process_inputs(self, kwds):
"""process and activate input settings"""
#allow for inputs that don't conform to AbstractSolver interface
#NOTE: not sticky: callback, disp
#NOTE: sticky: EvaluationMonitor, StepMonitor, penalty, constraints
settings = \
{'callback':None, #user-supplied function, called after each step
'disp':0} #non-zero to print convergence messages
        settings.update({i:j for (i,j) in kwds.items() if i in settings})
# backward compatibility
if 'EvaluationMonitor' in kwds: \
self.SetEvaluationMonitor(kwds['EvaluationMonitor'])
if 'StepMonitor' in kwds: \
self.SetGenerationMonitor(kwds['StepMonitor'])
if 'penalty' in kwds: \
self.SetPenalty(kwds['penalty'])
if 'constraints' in kwds: \
self.SetConstraints(kwds['constraints'])
return settings
def Step(self, cost=None, termination=None, ExtraArgs=None, **kwds):
"""Take a single optimiztion step using the given 'cost' function.
Description:
Uses an optimization algorithm to take one 'step' toward
the minimum of a function of one or more variables.
Inputs:
cost -- the Python function or method to be minimized.
Additional Inputs:
termination -- callable object providing termination conditions.
ExtraArgs -- extra arguments for cost.
Further Inputs:
callback -- an optional user-supplied function to call after each
iteration. It is called as callback(xk), where xk is
the current parameter vector. [default = None]
disp -- non-zero to print convergence messages.
Notes:
If the algorithm does not meet the given termination conditions after
the call to "Step", the solver may be left in an "out-of-sync" state.
        When abandoning a non-terminated solver, one should call "Finalize"
to make sure the solver is fully returned to a "synchronized" state.
To run the solver until termination, call "Solve()". Alternately, use
Terminated()" as the condition in a while loop over "Step".
"""
disp = kwds.pop('disp', False)
# register: cost, termination, ExtraArgs
cost = self._bootstrap_objective(cost, ExtraArgs)
if termination is not None: self.SetTermination(termination)
# check termination before 'stepping'
if len(self._stepmon):
msg = self.Terminated(disp=disp, info=True) or None
else: msg = None
# if not terminated, then take a step
if msg is None:
self._Step(**kwds) #FIXME: not all kwds are given in __doc__
if self.Terminated(): # then cleanup/finalize
self.Finalize()
# get termination message and log state
msg = self.Terminated(disp=disp, info=True) or None
if msg:
self._stepmon.info('STOP("%s")' % msg)
self.__save_state(force=True)
return msg
def Solve(self, cost=None, termination=None, ExtraArgs=None, **kwds):
"""Minimize a 'cost' function with given termination conditions.
Description:
Uses an optimization algorithm to find the minimum of
a function of one or more variables.
Inputs:
cost -- the Python function or method to be minimized.
Additional Inputs:
termination -- callable object providing termination conditions.
ExtraArgs -- extra arguments for cost.
Further Inputs:
sigint_callback -- callback function for signal handler.
callback -- an optional user-supplied function to call after each
iteration. It is called as callback(xk), where xk is
the current parameter vector. [default = None]
disp -- non-zero to print convergence messages.
"""
# process and activate input settings
sigint_callback = kwds.pop('sigint_callback', None)
settings = self._process_inputs(kwds)
disp = settings.get('disp', False)
# set up signal handler
self._EARLYEXIT = False #XXX: why not use EARLYEXIT singleton?
self._generateHandler(sigint_callback)
# activate signal handler
#import threading as thread
#mainthread = isinstance(thread.current_thread(), thread._MainThread)
#if mainthread: #XXX: if not mainthread, signal will raise ValueError
import signal
if self._handle_sigint:
signal.signal(signal.SIGINT,self.signal_handler)
# register: cost, termination, ExtraArgs
cost = self._bootstrap_objective(cost, ExtraArgs)
if termination is not None: self.SetTermination(termination)
#XXX: self.Step(cost, termination, ExtraArgs, **settings) ?
# the main optimization loop
stop = False
while not stop:
stop = self.Step(**settings) #XXX: remove need to pass settings?
continue
# if collapse, then activate any relevant collapses and continue
self.__stop__ = stop #HACK: avoid re-evaluation of Termination
while self._collapse and self.Collapse(disp=disp):
del self.__stop__ #HACK
stop = False
while not stop:
stop = self.Step(**settings) #XXX: move Collapse inside of Step?
continue
self.__stop__ = stop #HACK
del self.__stop__ #HACK
# restore default handler for signal interrupts
if self._handle_sigint:
signal.signal(signal.SIGINT,signal.default_int_handler)
return
def __copy__(self):
cls = self.__class__
result = cls.__new__(cls)
result.__dict__.update(self.__dict__)
return result
def __deepcopy__(self, memo):
import copy
import dill
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
if v is self._cost:
setattr(result, k, tuple(dill.copy(i) for i in v))
else:
try: #XXX: work-around instancemethods in python2.6
setattr(result, k, copy.deepcopy(v, memo))
except TypeError:
setattr(result, k, dill.copy(v))
return result
# extensions to the solver interface
evaluations = property(__evaluations )
generations = property(__generations )
energy_history = property(__energy_history,__set_energy_history )
solution_history = property(__solution_history,__set_solution_history )
bestEnergy = property(__bestEnergy,__set_bestEnergy )
bestSolution = property(__bestSolution,__set_bestSolution )
pass
if __name__=='__main__':
help(__name__)
# end of file
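# Usage sketch for the solver interface above. This assumes the class is
# driven through a concrete subclass such as mystic's
# DifferentialEvolutionSolver; the imports and numbers below are
# illustrative, not part of this file:
#
#   from mystic.solvers import DifferentialEvolutionSolver
#   from mystic.termination import ChangeOverGeneration
#   from mystic.models import rosen
#
#   solver = DifferentialEvolutionSolver(3, 40)             # dim=3, npop=40
#   solver.SetRandomInitialPoints(min=[-5]*3, max=[5]*3)
#   solver.SetEvaluationLimits(generations=200)
#   solver.SetTermination(ChangeOverGeneration())
#   solver.Solve(rosen)          # or: while not solver.Terminated(): solver.Step(rosen)
#   print(solver.bestSolution, solver.bestEnergy)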
| [
"mmckerns@968178ea-60bd-409e-af13-df8a517b6005"
] | mmckerns@968178ea-60bd-409e-af13-df8a517b6005 |
7b00ee3b92761685a2e32d3a4d48ca7ab9336fda | 25c0e72ea6889749cb269dfd26a77edfc4207d40 | /fuzzers/009-xor_b_mux/fuzzer.py | 10eff1adfe642697d899998d81e1ec56c7552e86 | [
"0BSD"
] | permissive | whitequark/prjbureau | 49c2d060ca7b99042fdc751e70f10ad74309975b | cbe15e117449c55e7244756f00c3e34e0d92017e | refs/heads/main | 2023-08-16T10:34:53.915942 | 2021-11-27T21:34:41 | 2021-11-27T21:34:41 | 227,539,435 | 44 | 8 | NOASSERTION | 2023-08-07T16:12:37 | 2019-12-12T06:48:44 | Python | UTF-8 | Python | false | false | 1,577 | py | from util import database, toolchain, bitdiff, progress
with database.transact() as db:
for device_name, device in db.items():
progress(device_name)
package, pinout = next(iter(device['pins'].items()))
for macrocell_idx, (macrocell_name, macrocell) in enumerate(device['macrocells'].items()):
progress(1)
def run(code):
return toolchain.run(
f"module top(input CLK, output O); "
f"wire Q; TRI tri(Q, 1'b0, O); "
f"{code} "
f"endmodule",
{
'CLK': pinout['C1'],
'ff': str(601 + macrocell_idx),
},
f"{device_name}-{package}")
f_dff = run("DFF ff(.CLK(CLK), .D(1'b0), .Q(Q));")
f_tff = run("TFF ff(.CLK(CLK), .T(1'b0), .Q(Q));")
# The GND choice of XOR B mux is shared with !PT1 and !PT2 choices: if xor_invert
# is off, then it is GND; otherwise: if pt2_mux is xor and xor_a_mux is sum, then
# it is !PT2; if pt1_mux is flb and xor_a_mux is VCC_pt2, then it is !PT1; otherwise
# it is GND. Further, the XOR B mux is linked to FLB: if XOR B mux is !PT1, then FLB
# is always 1, otherwise FLB follows pt1_mux.
macrocell.update({
'xor_b_mux':
bitdiff.describe(1, {
'VCC_pt12': f_dff,
'ff_qn': f_tff
})
})
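# Hedged sketch of the fuse decoding rules stated in the comment above;
# decode_xor_b is a hypothetical helper for illustration, not part of this
# fuzzer:
#
#   def decode_xor_b(xor_invert, pt1_mux, pt2_mux, xor_a_mux):
#       if not xor_invert:
#           return 'GND'
#       if pt2_mux == 'xor' and xor_a_mux == 'sum':
#           return '!PT2'
#       if pt1_mux == 'flb' and xor_a_mux == 'VCC_pt2':
#           return '!PT1'
#       return 'GND'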
| [
"[email protected]"
] | |
bff60c91bc6b4841943f12e48362e1aa2fbd2a68 | 6f78a4c4896563a52d86eacf49dbb6a358a3646e | /hackerrank/python/hackerrank_GreedyFlorist.py | 16ef36a533bd86a3d30da8b3791ca3b24de10ad2 | [] | no_license | wj1224/algorithm_solve | 259c39d2a85ecb2630e089eb0c86cdde9ff3baeb | 8b0f15b71a4dd8eb40d3c9baee003a0678c3f2aa | refs/heads/master | 2023-08-25T06:14:21.615802 | 2021-10-26T05:00:59 | 2021-10-26T05:00:59 | 219,981,525 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 683 | py | #!/bin/python3
import math
import os
import random
import re
import sys
# Complete the getMinimumCost function below.
def getMinimumCost(k, c):
    # Greedy strategy: sort prices ascending, then buy the most expensive
    # flowers first, one per friend in round-robin order, so the growing
    # (previous purchases + 1) multiplier falls on the cheapest flowers.
    c.sort()
    p = dict()  # number of flowers already bought by each of the k friends
    for i in range(k):
        p[i] = 0
    answer = 0
    idx = 0
    for i in range(len(c) - 1, -1, -1):
        if p[idx] == 0:
            p[idx] = 1
            answer += c[i]
        else:
            answer += ((p[idx] + 1) * c[i])
            p[idx] += 1
        idx += 1
        if idx == k:
            idx = 0
    return answer
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
nk = input().split()
n = int(nk[0])
k = int(nk[1])
c = list(map(int, input().rstrip().split()))
minimumCost = getMinimumCost(k, c)
fptr.write(str(minimumCost) + '\n')
fptr.close()
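# Worked examples (HackerRank's sample data) for the greedy above:
#
#   assert getMinimumCost(3, [2, 5, 6]) == 13   # 6*1 + 5*1 + 2*1
#   assert getMinimumCost(2, [2, 5, 6]) == 15   # 6*1 + 5*1 + 2*2
#
# Each friend's next flower costs (purchases + 1) times its base price, so
# spreading purchases across all k friends keeps the multipliers low.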
| [
"[email protected]"
] | |
6a745e9f87961ebcdf3c39a8a5a82bb8766d12fd | 9ae6ce54bf9a2a86201961fdbd5e7b0ec913ff56 | /google/ads/googleads/v11/resources/types/custom_interest.py | 99614684ab894f1604193e87090c4c4a6cf627c7 | [
"Apache-2.0"
] | permissive | GerhardusM/google-ads-python | 73b275a06e5401e6b951a6cd99af98c247e34aa3 | 676ac5fcb5bec0d9b5897f4c950049dac5647555 | refs/heads/master | 2022-07-06T19:05:50.932553 | 2022-06-17T20:41:17 | 2022-06-17T20:41:17 | 207,535,443 | 0 | 0 | Apache-2.0 | 2019-09-10T10:58:55 | 2019-09-10T10:58:55 | null | UTF-8 | Python | false | false | 4,315 | py | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v11.enums.types import custom_interest_member_type
from google.ads.googleads.v11.enums.types import custom_interest_status
from google.ads.googleads.v11.enums.types import custom_interest_type
__protobuf__ = proto.module(
package="google.ads.googleads.v11.resources",
marshal="google.ads.googleads.v11",
manifest={"CustomInterest", "CustomInterestMember",},
)
class CustomInterest(proto.Message):
r"""A custom interest. This is a list of users by interest.
Attributes:
resource_name (str):
Immutable. The resource name of the custom interest. Custom
interest resource names have the form:
``customers/{customer_id}/customInterests/{custom_interest_id}``
id (int):
Output only. Id of the custom interest.
This field is a member of `oneof`_ ``_id``.
status (google.ads.googleads.v11.enums.types.CustomInterestStatusEnum.CustomInterestStatus):
Status of this custom interest. Indicates
whether the custom interest is enabled or
removed.
name (str):
Name of the custom interest. It should be
unique across the same custom affinity audience.
This field is required for create operations.
This field is a member of `oneof`_ ``_name``.
type_ (google.ads.googleads.v11.enums.types.CustomInterestTypeEnum.CustomInterestType):
Type of the custom interest, CUSTOM_AFFINITY or
CUSTOM_INTENT. By default the type is set to
CUSTOM_AFFINITY.
description (str):
Description of this custom interest audience.
This field is a member of `oneof`_ ``_description``.
members (Sequence[google.ads.googleads.v11.resources.types.CustomInterestMember]):
List of custom interest members that this
custom interest is composed of. Members can be
added during CustomInterest creation. If members
are presented in UPDATE operation, existing
members will be overridden.
"""
resource_name = proto.Field(proto.STRING, number=1,)
id = proto.Field(proto.INT64, number=8, optional=True,)
status = proto.Field(
proto.ENUM,
number=3,
enum=custom_interest_status.CustomInterestStatusEnum.CustomInterestStatus,
)
name = proto.Field(proto.STRING, number=9, optional=True,)
type_ = proto.Field(
proto.ENUM,
number=5,
enum=custom_interest_type.CustomInterestTypeEnum.CustomInterestType,
)
description = proto.Field(proto.STRING, number=10, optional=True,)
members = proto.RepeatedField(
proto.MESSAGE, number=7, message="CustomInterestMember",
)
class CustomInterestMember(proto.Message):
r"""A member of custom interest audience. A member can be a
keyword or url. It is immutable, that is, it can only be created
or removed but not changed.
Attributes:
member_type (google.ads.googleads.v11.enums.types.CustomInterestMemberTypeEnum.CustomInterestMemberType):
The type of custom interest member, KEYWORD
or URL.
parameter (str):
Keyword text when member_type is KEYWORD or URL string when
member_type is URL.
This field is a member of `oneof`_ ``_parameter``.
"""
member_type = proto.Field(
proto.ENUM,
number=1,
enum=custom_interest_member_type.CustomInterestMemberTypeEnum.CustomInterestMemberType,
)
parameter = proto.Field(proto.STRING, number=3, optional=True,)
__all__ = tuple(sorted(__protobuf__.manifest))
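# Construction sketch (assumes the usual proto-plus keyword initialization;
# all values below are illustrative):
#
#   interest = CustomInterest(
#       name="Trail runners",
#       type_=custom_interest_type.CustomInterestTypeEnum.CustomInterestType.CUSTOM_AFFINITY,
#       members=[CustomInterestMember(parameter="trail running shoes")],
#   )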
| [
"[email protected]"
] | |
3e5d83152601273afe64ccec38a1da8f975d3f69 | d2cacbd1bde10e464faabc22ad5936f1aaf4e2ef | /data/Exp_ICIP/SingleTraining/Standard/chess/main.py | fd7f9227b6f2876e357707f2b72f4758b385fefe | [] | no_license | npiasco/dl_management | a26950a3b53c720d881a8b7ac3fa81161a048256 | 11c29a3637efa5fd223b36664d62c704e8166bab | refs/heads/master | 2021-03-16T05:44:39.806437 | 2019-09-06T13:52:52 | 2019-09-06T13:52:52 | 124,055,338 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,585 | py | #!/usr/bin/env python
import os, sys
import setlog
conf_file = os.environ['DEV'] + 'dl_management/.log/logging.yaml'
save_file = os.path.abspath(sys.argv[0])[:-len(sys.argv[0])] + 'log/'
setlog.reconfigure(conf_file, save_file)
import system.PoseRegression as System
if __name__ == '__main__':
scene = 'chess'
machine = System.MultNet(root=os.path.abspath(sys.argv[0])[:-len(sys.argv[0])],
#trainer_file='../../feat_trainer.yaml',
trainer_file= 'trainer.yaml',
#trainer_file='../../trainer_depth.yaml',
dataset_file = '../../../datasets/' + scene + '.yaml',
#cnn_type='../../cnn.yaml'
cnn_type='../../vladcnn.yaml'
)
action = input('Exec:\n[t]\ttrain\n[e]\ttest\n[p]\tprint (console)\n[P]\tprint (full)\n[ ]\ttrain+test\n')
if action == 't':
machine.train()
elif action == 'e':
machine.test()
machine.plot(print_loss=False, print_val=False)
elif action == 'ef':
machine.test_on_final()
machine.plot(print_loss=False, print_val=False)
elif action == 'p':
machine.plot(print_loss=False, print_val=False)
elif action == 'P':
machine.plot()
elif action == 'm':
machine.map_print(batch_size=1)
elif action == 'mf':
machine.map_print(final=True, batch_size=1)
elif action == 's':
machine.serialize_net()
elif action == 'sf':
machine.serialize_net(final=True)
elif action == 'pose':
machine.view_localization(pas=3)
elif action == 'posef':
machine.view_localization(pas=3, final=True)
elif action == 'model':
machine.creat_model()
elif action == 'modeld':
machine.creat_model(fake_depth=True)
elif action == 'modelt':
machine.creat_model(test=True)
elif action == 'modeldt':
machine.creat_model(test=True, fake_depth=True)
elif action == 'clusters':
machine.creat_clusters(64, size_feat=256, map_feat='conv7')
elif action == 'thresh':
machine.threshold_selection(final=True, dataset='test', load=False, beg=0.0, n_values=2000)
elif action == 'threshl':
machine.threshold_selection(final=True, dataset='test', load=True, beg=0.0, n_values=2000)
elif action == '':
machine.train()
machine.test()
machine.plot(print_loss=False, print_val=False)
else:
raise ValueError('Unknown cmd: {}'.format(action))
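# A table-driven alternative to the if/elif chain above (a sketch only, not
# used by this script): map each command string to a callable and dispatch
# through the dict, e.g.
#
#   actions = {'t': machine.train, 'e': machine.test, 'm': machine.map_print}
#   actions.get(action, lambda: print('Unknown cmd: {}'.format(action)))()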
| [
"[email protected]"
] | |
5497f6ee6391b9ac43175da5e71e6258fe100482 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2343/60747/290105.py | acd1d87a6a3d7f48f8d17e9d3a7187668c2002bd | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 168 | py | n=input().split(" ")
s=""
for i in range(int(n[0])+int(n[1])):
s=s+input()
if s=="n<><>un<>nnuonuu<>un<><>u<><>o<><>n<><>u<><>n<><>u":print("RLLRLRR")
else:print(s) | [
"[email protected]"
] | |
422b6c2b3875c8bc1e6c5e9adb460cba8e8e15e5 | f303936feb0f221ea1ccb3ef1eae57654aa0325a | /server/szurubooru/func/users.py | 3c39fb5a119d15c7080f29da463b20de76a58417 | [] | no_license | hnamquoc/szurubooru | 7b695e232c7f601dc95f77fbb7570aef3e16ddd9 | 16d4d3ca68964eb7759b629ec84eb6b14d9d7cdb | refs/heads/master | 2020-12-31T01:23:26.322422 | 2016-05-21T20:29:31 | 2016-05-21T20:35:18 | 59,414,380 | 1 | 0 | null | 2016-05-22T13:41:37 | 2016-05-22T13:41:36 | null | UTF-8 | Python | false | false | 6,152 | py | import datetime
import re
from sqlalchemy import func
from szurubooru import config, db, errors
from szurubooru.func import auth, util, files, images
class UserNotFoundError(errors.NotFoundError): pass
class UserAlreadyExistsError(errors.ValidationError): pass
class InvalidUserNameError(errors.ValidationError): pass
class InvalidEmailError(errors.ValidationError): pass
class InvalidPasswordError(errors.ValidationError): pass
class InvalidRankError(errors.ValidationError): pass
class InvalidAvatarError(errors.ValidationError): pass
def serialize_user(user, authenticated_user, force_show_email=False):
if not user:
return {}
ret = {
'name': user.name,
'rank': user.rank,
'creationTime': user.creation_time,
'lastLoginTime': user.last_login_time,
'avatarStyle': user.avatar_style,
'email': user.email,
}
if user.avatar_style == user.AVATAR_GRAVATAR:
ret['avatarUrl'] = 'http://gravatar.com/avatar/%s?d=retro&s=%d' % (
util.get_md5((user.email or user.name).lower()),
config.config['thumbnails']['avatar_width'])
else:
ret['avatarUrl'] = '%s/avatars/%s.png' % (
config.config['data_url'].rstrip('/'), user.name.lower())
if authenticated_user.user_id != user.user_id \
and not force_show_email \
and not auth.has_privilege(authenticated_user, 'users:edit:any:email'):
del ret['email']
return ret
def serialize_user_with_details(user, authenticated_user, **kwargs):
return {'user': serialize_user(user, authenticated_user, **kwargs)}
def get_user_count():
return db.session.query(db.User).count()
def try_get_user_by_name(name):
return db.session \
.query(db.User) \
.filter(func.lower(db.User.name) == func.lower(name)) \
.one_or_none()
def get_user_by_name(name):
user = try_get_user_by_name(name)
if not user:
raise UserNotFoundError('User %r not found.' % name)
return user
def try_get_user_by_name_or_email(name_or_email):
return db.session \
.query(db.User) \
.filter(
(func.lower(db.User.name) == func.lower(name_or_email))
| (func.lower(db.User.email) == func.lower(name_or_email))) \
.one_or_none()
def get_user_by_name_or_email(name_or_email):
user = try_get_user_by_name_or_email(name_or_email)
if not user:
raise UserNotFoundError('User %r not found.' % name_or_email)
return user
def create_user(name, password, email):
user = db.User()
update_user_name(user, name)
update_user_password(user, password)
update_user_email(user, email)
if get_user_count() > 0:
user.rank = util.flip(auth.RANK_MAP)[config.config['default_rank']]
else:
user.rank = db.User.RANK_ADMINISTRATOR
user.creation_time = datetime.datetime.now()
user.avatar_style = db.User.AVATAR_GRAVATAR
return user
def update_user_name(user, name):
if not name:
raise InvalidUserNameError('Name cannot be empty.')
if util.value_exceeds_column_size(name, db.User.name):
raise InvalidUserNameError('User name is too long.')
other_user = try_get_user_by_name(name)
if other_user and other_user.user_id != user.user_id:
raise UserAlreadyExistsError('User %r already exists.' % name)
name = name.strip()
name_regex = config.config['user_name_regex']
if not re.match(name_regex, name):
raise InvalidUserNameError(
'User name %r must satisfy regex %r.' % (name, name_regex))
user.name = name
def update_user_password(user, password):
if not password:
raise InvalidPasswordError('Password cannot be empty.')
password_regex = config.config['password_regex']
if not re.match(password_regex, password):
raise InvalidPasswordError(
'Password must satisfy regex %r.' % password_regex)
user.password_salt = auth.create_password()
user.password_hash = auth.get_password_hash(user.password_salt, password)
def update_user_email(user, email):
if email:
email = email.strip()
if not email:
email = None
if email and util.value_exceeds_column_size(email, db.User.email):
raise InvalidEmailError('Email is too long.')
if not util.is_valid_email(email):
raise InvalidEmailError('E-mail is invalid.')
user.email = email
def update_user_rank(user, rank, authenticated_user):
if not rank:
raise InvalidRankError('Rank cannot be empty.')
rank = util.flip(auth.RANK_MAP).get(rank.strip(), None)
all_ranks = list(auth.RANK_MAP.values())
if not rank:
raise InvalidRankError(
'Rank can be either of %r.' % all_ranks)
if rank in (db.User.RANK_ANONYMOUS, db.User.RANK_NOBODY):
raise InvalidRankError('Rank %r cannot be used.' % auth.RANK_MAP[rank])
if all_ranks.index(authenticated_user.rank) \
< all_ranks.index(rank) and get_user_count() > 0:
raise errors.AuthError('Trying to set higher rank than your own.')
user.rank = rank
def update_user_avatar(user, avatar_style, avatar_content):
if avatar_style == 'gravatar':
user.avatar_style = user.AVATAR_GRAVATAR
elif avatar_style == 'manual':
user.avatar_style = user.AVATAR_MANUAL
if not avatar_content:
raise InvalidAvatarError('Avatar content missing.')
image = images.Image(avatar_content)
image.resize_fill(
int(config.config['thumbnails']['avatar_width']),
int(config.config['thumbnails']['avatar_height']))
files.save('avatars/' + user.name.lower() + '.png', image.to_png())
else:
raise InvalidAvatarError(
'Avatar style %r is invalid. Valid avatar styles: %r.' % (
avatar_style, ['gravatar', 'manual']))
def bump_user_login_time(user):
user.last_login_time = datetime.datetime.now()
def reset_user_password(user):
password = auth.create_password()
user.password_salt = auth.create_password()
user.password_hash = auth.get_password_hash(user.password_salt, password)
return password
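# Usage sketch (assumes a configured szurubooru database session; names and
# values are illustrative):
#
#   user = create_user('alice', 'correct horse battery staple', '[email protected]')
#   db.session.add(user)
#   update_user_rank(user, 'regular', authenticated_user=admin_user)
#   db.session.commit()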
| [
"[email protected]"
] | |
23b822722a4b4ca822ac1f006b41a993c869e3de | 7708007df6f36203dbea6ffcc93181cf100f4cb5 | /Pulpit/python90/repozytorium/Lista_liczb.py | bee7855eb16f733381a272e3c28809f38d0e908a | [] | no_license | Marcin9009/marcin.pelszyk | 87e8b91ef1150f867677121f6d47e9aecc0be8a8 | ff20d11051db666a00b07e404037d80bebcf5fc1 | refs/heads/master | 2022-12-31T03:41:59.737766 | 2020-10-17T08:48:56 | 2020-10-17T08:48:56 | 290,585,259 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52 | py | numbers=[1, 2, 3, 4, 5, 6, 7, 8]
print (numbers[-2]) | [
"[email protected]"
] | |
f4634675d8b38cab07ad7568cd4c1eb03d5df4c7 | bddc40a97f92fafb8cbbbfdbdfe6774996578bb0 | /exercicioLista01/ex09.py | 3d885e7b04164115b80e89e056de00459bd1665a | [] | no_license | andrehmiguel/treinamento | 8f83041bd51387dd3e5cafed09c4bb0a08d0e375 | ed18e6a8cfba0baaa68757c12893c62a0938a67e | refs/heads/main | 2023-01-31T13:15:58.113392 | 2020-12-16T02:47:44 | 2020-12-16T02:47:44 | 317,631,214 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | #09 Faça um Programa que peça a temperatura em graus Fahrenheit, transforme e mostre a temperatura em graus Celsius.
#C = 5 * ((F-32) / 9).
F = int(input("Informe a temperatura em F: "))
C = 5 * ((F-32) / 9)
print(F, "graus Fahrenheit, equivalem a", C, "graus Celsius.") | [
"[email protected]"
] | |
7bcb3f7715b24df699c20989fad420f6b3ed7bb7 | 930a868ae9bbf85df151b3f54d04df3a56bcb840 | /benchmark/slurm_utilities/slurm_rerun_failed.py | e949b3379c738b6570226a175e386621067b1975 | [
"MIT"
] | permissive | yuewuo/QEC-Playground | 1148f3c5f4035c069986d8b4103acf7f1e34f9d4 | 462208458cdf9dc8a33d4553a560f8a16c00e559 | refs/heads/main | 2023-08-10T13:05:36.617858 | 2023-07-22T23:48:49 | 2023-07-22T23:48:49 | 312,809,760 | 16 | 1 | MIT | 2023-07-22T23:48:51 | 2020-11-14T12:10:38 | Python | UTF-8 | Python | false | false | 1,290 | py | import os, sys, subprocess, time
import slurm_distribute
def rerun_failed(sbatch_file_path, failed_cases, slurm_commands_vec=None, use_interactive_partition=False):
# generate rerun sbatch file
sbatch_file_folder = os.path.dirname(sbatch_file_path)
rerun_file_path = os.path.join(sbatch_file_folder, "rerun-" + os.path.basename(sbatch_file_path))
with open(sbatch_file_path, "r", encoding="utf8") as f:
lines = f.readlines()
with open(rerun_file_path, "w", encoding="utf8") as f:
for line in lines:
if line.startswith("#SBATCH --array="):
f.write(f"#SBATCH --array={','.join([str(e) for e in failed_cases])}\n")
else:
f.write(line)
print("rerun_file_path", rerun_file_path)
slurm_distribute.slurm_run_sbatch_wait(rerun_file_path, failed_cases, original_sbatch_file_path=sbatch_file_path, slurm_commands_vec=slurm_commands_vec, use_interactive_partition=use_interactive_partition)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("usage: <sbatch_file_path> <failed_cases: comma separated>")
exit(-1)
sbatch_file_path = os.path.abspath(sys.argv[1])
failed_cases = [int(e) for e in sys.argv[2].split(",")]
rerun_failed(sbatch_file_path, failed_cases)
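# Example invocation (paths and indices are illustrative), matching the usage
# string above; the generated rerun file replaces the array directive with a
# line like "#SBATCH --array=3,7,12":
#
#   python3 slurm_rerun_failed.py ./jobs/run.sbatch 3,7,12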
| [
"[email protected]"
] | |
fde8f6ad08e5e3d32823235fefd03f42d934eaa8 | 16aba0619caf0ad5ffd4817ac0575943e8b4659e | /venv/lib/python3.6/shutil.py | 05c167c5d99fc2ca0c6aa19945e50d8fdc9c7aa1 | [] | no_license | Rin94/seleniumtests | 23cd0b00609fa4b00216dc4fa27a96637ee0aedd | 3ae6a87b2410ed5a6f66f1b69915071c3517b5f8 | refs/heads/main | 2023-05-23T11:02:33.443620 | 2021-06-13T04:43:53 | 2021-06-13T04:43:53 | 376,441,010 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 46 | py | /Users/jared/anaconda3/lib/python3.6/shutil.py | [
"[email protected]"
] | |
e47c89d5d5eb49f7afe555e7c8cf6ec48d648a26 | 30e018214a16567201d1d6137dddf597e5f1d0e8 | /mbtaapi/apis/prediction_api.py | 72d2056ba61f1163fd5744609ab4ca1004893d79 | [] | no_license | jbarciauskas/mbtaapi3 | 6a402fb789b70bf8f08c3e27559e141b535c8491 | 2d63c57113d4f2266e8f59a2f6b690fe5ffe102e | refs/heads/master | 2021-08-30T02:36:11.465499 | 2017-12-15T18:52:57 | 2017-12-15T18:52:57 | 114,401,885 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,372 | py | # coding: utf-8
"""
MBTA
MBTA service API. https://www.mbta.com
OpenAPI spec version: 3.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class PredictionApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def api_web_prediction_controller_index(self, **kwargs):
"""
**NOTE:** A filter **MUST** be present for any predictions to be returned. List of predictions for trips. To get the scheduled times instead of the predictions, use `/schedules`. The predicted arrival time (`//data/{index}/attributes/arrival_time`) and departure time (`/data/{index}/attributes/departure_time`) to/from a stop (`/data/{index}/relationships/stop/data/id`) at a given sequence (`/data/{index}/attriutes/stop_sequence`) along a trip (`/data/{index}/relationships/trip/data/id`) going a direction (`/data/{index}/attributes/direction_id`) along a route (`/data/{index}/relationships/route/data/id`). See [GTFS Realtime `FeedMesage` `FeedEntity` `TripUpdate` `TripDescriptor`](https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#message-tripdescriptor) See [GTFS Realtime `FeedMesage` `FeedEntity` `TripUpdate` `StopTimeUpdate`](https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#message-stoptimeupdate) ## When a vehicle is predicted to be at a stop `/predictions?filter[stop]=STOP_ID` ## The predicted schedule for one route `/predictions?filter[route]=ROUTE_ID` ## The predicted schedule for a whole trip `/predictions?filter[trip]=TRIP_ID`
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.api_web_prediction_controller_index(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page_offset: Offset (0-based) of first element in the page
:param int page_limit: Max number of elements to return
:param str sort: Results can be [sorted](http://jsonapi.org/format/#fetching-sorting) by the id or any `/data/{index}/attributes` key. Assumes ascending; may be prefixed with '-' for descending | JSON pointer | Direction | `sort` | |--------------|-----------|------------| | `/data/{index}/attributes/arrival_time` | ascending | `arrival_time` | | `/data/{index}/attributes/arrival_time` | descending | `-arrival_time` | | `/data/{index}/attributes/departure_time` | ascending | `departure_time` | | `/data/{index}/attributes/departure_time` | descending | `-departure_time` | | `/data/{index}/attributes/direction_id` | ascending | `direction_id` | | `/data/{index}/attributes/direction_id` | descending | `-direction_id` | | `/data/{index}/attributes/schedule_relationship` | ascending | `schedule_relationship` | | `/data/{index}/attributes/schedule_relationship` | descending | `-schedule_relationship` | | `/data/{index}/attributes/status` | ascending | `status` | | `/data/{index}/attributes/status` | descending | `-status` | | `/data/{index}/attributes/stop_sequence` | ascending | `stop_sequence` | | `/data/{index}/attributes/stop_sequence` | descending | `-stop_sequence` | | `/data/{index}/attributes/track` | ascending | `track` | | `/data/{index}/attributes/track` | descending | `-track` |
:param str api_key: Key for API access. Without api_key, requests will be tracked by IP address and have stricter rate limit. [Register for a key](/register)
:param str include: Relationships to include. * `schedule` * `stop` * `route` * `trip` * `vehicle` * `alerts` The value of the include parameter **MUST** be a comma-separated (U+002C COMMA, \",\") list of relationship paths. A relationship path is a dot-separated (U+002E FULL-STOP, \".\") list of relationship names. [JSONAPI \"include\" behavior](http://jsonapi.org/format/#fetching-includes)
:param str filter_latitude: Latitude/Longitude must be both present or both absent.
:param str filter_longitude: Latitude/Longitude must be both present or both absent.
:param str filter_direction_id: Filter by direction of travel along the route. The meaning of `direction_id` varies based on the route. You can programmatically get the direction names from `/routes` `/data/{index}/attributes/direction_names` or `/routes/{id}` `/data/attributes/direction_names`.
:param str filter_stop: Filter by `/data/{index}/relationships/stop/data/id`. Multiple `/data/{index}/relationships/stop/data/id` **MUST** be a comma-separated (U+002C COMMA, \",\") list.
:param str filter_route: Filter by `/data/{index}/relationships/route/data/id`. Multiple `/data/{index}/relationships/route/data/id` **MUST** be a comma-separated (U+002C COMMA, \",\") list.
:param str filter_trip: Filter by `/data/{index}/relationships/trip/data/id`. Multiple `/data/{index}/relationships/trip/data/id` **MUST** be a comma-separated (U+002C COMMA, \",\") list.
:return: Predictions
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.api_web_prediction_controller_index_with_http_info(**kwargs)
else:
            data = self.api_web_prediction_controller_index_with_http_info(**kwargs)
return data
def api_web_prediction_controller_index_with_http_info(self, **kwargs):
"""
**NOTE:** A filter **MUST** be present for any predictions to be returned. List of predictions for trips. To get the scheduled times instead of the predictions, use `/schedules`. The predicted arrival time (`//data/{index}/attributes/arrival_time`) and departure time (`/data/{index}/attributes/departure_time`) to/from a stop (`/data/{index}/relationships/stop/data/id`) at a given sequence (`/data/{index}/attriutes/stop_sequence`) along a trip (`/data/{index}/relationships/trip/data/id`) going a direction (`/data/{index}/attributes/direction_id`) along a route (`/data/{index}/relationships/route/data/id`). See [GTFS Realtime `FeedMesage` `FeedEntity` `TripUpdate` `TripDescriptor`](https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#message-tripdescriptor) See [GTFS Realtime `FeedMesage` `FeedEntity` `TripUpdate` `StopTimeUpdate`](https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#message-stoptimeupdate) ## When a vehicle is predicted to be at a stop `/predictions?filter[stop]=STOP_ID` ## The predicted schedule for one route `/predictions?filter[route]=ROUTE_ID` ## The predicted schedule for a whole trip `/predictions?filter[trip]=TRIP_ID`
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.api_web_prediction_controller_index_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page_offset: Offset (0-based) of first element in the page
:param int page_limit: Max number of elements to return
:param str sort: Results can be [sorted](http://jsonapi.org/format/#fetching-sorting) by the id or any `/data/{index}/attributes` key. Assumes ascending; may be prefixed with '-' for descending | JSON pointer | Direction | `sort` | |--------------|-----------|------------| | `/data/{index}/attributes/arrival_time` | ascending | `arrival_time` | | `/data/{index}/attributes/arrival_time` | descending | `-arrival_time` | | `/data/{index}/attributes/departure_time` | ascending | `departure_time` | | `/data/{index}/attributes/departure_time` | descending | `-departure_time` | | `/data/{index}/attributes/direction_id` | ascending | `direction_id` | | `/data/{index}/attributes/direction_id` | descending | `-direction_id` | | `/data/{index}/attributes/schedule_relationship` | ascending | `schedule_relationship` | | `/data/{index}/attributes/schedule_relationship` | descending | `-schedule_relationship` | | `/data/{index}/attributes/status` | ascending | `status` | | `/data/{index}/attributes/status` | descending | `-status` | | `/data/{index}/attributes/stop_sequence` | ascending | `stop_sequence` | | `/data/{index}/attributes/stop_sequence` | descending | `-stop_sequence` | | `/data/{index}/attributes/track` | ascending | `track` | | `/data/{index}/attributes/track` | descending | `-track` |
:param str api_key: Key for API access. Without api_key, requests will be tracked by IP address and have stricter rate limit. [Register for a key](/register)
:param str include: Relationships to include. * `schedule` * `stop` * `route` * `trip` * `vehicle` * `alerts` The value of the include parameter **MUST** be a comma-separated (U+002C COMMA, \",\") list of relationship paths. A relationship path is a dot-separated (U+002E FULL-STOP, \".\") list of relationship names. [JSONAPI \"include\" behavior](http://jsonapi.org/format/#fetching-includes)
:param str filter_latitude: Latitude/Longitude must be both present or both absent.
:param str filter_longitude: Latitude/Longitude must be both present or both absent.
:param str filter_direction_id: Filter by direction of travel along the route. The meaning of `direction_id` varies based on the route. You can programmatically get the direction names from `/routes` `/data/{index}/attributes/direction_names` or `/routes/{id}` `/data/attributes/direction_names`.
:param str filter_stop: Filter by `/data/{index}/relationships/stop/data/id`. Multiple `/data/{index}/relationships/stop/data/id` **MUST** be a comma-separated (U+002C COMMA, \",\") list.
:param str filter_route: Filter by `/data/{index}/relationships/route/data/id`. Multiple `/data/{index}/relationships/route/data/id` **MUST** be a comma-separated (U+002C COMMA, \",\") list.
:param str filter_trip: Filter by `/data/{index}/relationships/trip/data/id`. Multiple `/data/{index}/relationships/trip/data/id` **MUST** be a comma-separated (U+002C COMMA, \",\") list.
:return: Predictions
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_offset', 'page_limit', 'sort', 'api_key', 'include', 'filter_latitude', 'filter_longitude', 'filter_direction_id', 'filter_stop', 'filter_route', 'filter_trip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method api_web_prediction_controller_index" % key
)
params[key] = val
del params['kwargs']
if 'page_offset' in params and params['page_offset'] < 0:
raise ValueError("Invalid value for parameter `page_offset` when calling `api_web_prediction_controller_index`, must be a value greater than or equal to `0`")
if 'page_limit' in params and params['page_limit'] < 1:
raise ValueError("Invalid value for parameter `page_limit` when calling `api_web_prediction_controller_index`, must be a value greater than or equal to `1`")
collection_formats = {}
path_params = {}
query_params = []
if 'page_offset' in params:
query_params.append(('page[offset]', params['page_offset']))
if 'page_limit' in params:
query_params.append(('page[limit]', params['page_limit']))
if 'sort' in params:
query_params.append(('sort', params['sort']))
if 'api_key' in params:
query_params.append(('api_key', params['api_key']))
if 'include' in params:
query_params.append(('include', params['include']))
if 'filter_latitude' in params:
query_params.append(('filter[latitude]', params['filter_latitude']))
if 'filter_longitude' in params:
query_params.append(('filter[longitude]', params['filter_longitude']))
if 'filter_direction_id' in params:
query_params.append(('filter[direction_id]', params['filter_direction_id']))
if 'filter_stop' in params:
query_params.append(('filter[stop]', params['filter_stop']))
if 'filter_route' in params:
query_params.append(('filter[route]', params['filter_route']))
if 'filter_trip' in params:
query_params.append(('filter[trip]', params['filter_trip']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/vnd.api+json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/vnd.api+json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/predictions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Predictions',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| [
"[email protected]"
] | |
8cb4a82a7784f2fd5a859c4e959b8d7f2d80c6f7 | c1d53d9dd9c3e8a7ca8692d85a34063fd32680c5 | /colour_datasets/loaders/tests/test_dyer2017.py | 5773cf8a71e856b9de1739fc4ad007ecaf40030e | [
"BSD-3-Clause"
] | permissive | colour-science/colour-datasets | d1aeb8269e85c0d971a4eb813cf38f124cbb23ab | 3a41452f0a7083abc01ea8af6e9ee27fc4e1ddaa | refs/heads/develop | 2023-08-31T00:16:29.756191 | 2023-08-30T10:16:47 | 2023-08-30T10:16:47 | 190,873,818 | 41 | 14 | BSD-3-Clause | 2023-09-05T06:17:23 | 2019-06-08T10:17:27 | Python | UTF-8 | Python | false | false | 10,443 | py | #!/usr/bin/env python
"""Define the unit tests for the :mod:`colour_datasets.loaders.dyer2017` module."""
import numpy as np
import unittest
from colour_datasets.loaders import DatasetLoader_Dyer2017, build_Dyer2017
__author__ = "Colour Developers"
__copyright__ = "Copyright 2019 Colour Developers"
__license__ = "BSD-3-Clause - https://opensource.org/licenses/BSD-3-Clause"
__maintainer__ = "Colour Developers"
__email__ = "[email protected]"
__status__ = "Production"
__all__ = [
"TestDatasetLoader_Dyer2017",
"TestBuildDyer2017",
]
class TestDatasetLoader_Dyer2017(unittest.TestCase):
"""
Define :class:`colour_datasets.loaders.dyer2017.DatasetLoader_Dyer2017`
class unit tests methods.
"""
def test_required_attributes(self):
"""Test the presence of required attributes."""
required_attributes = ("ID",)
for attribute in required_attributes:
self.assertIn(attribute, dir(DatasetLoader_Dyer2017))
def test_required_methods(self):
"""Test the presence of required methods."""
required_methods = ("__init__", "load")
for method in required_methods:
self.assertIn(method, dir(DatasetLoader_Dyer2017))
def test_load(self):
"""
Test :func:`colour_datasets.loaders.dyer2017.DatasetLoader_Dyer2017.\
load` method.
"""
dataset = DatasetLoader_Dyer2017()
self.assertListEqual(
sorted(dataset.load().keys()),
["camera", "cmf", "illuminant", "training"],
)
np.testing.assert_array_almost_equal(
dataset.load()["camera"]["canon eos 5d mark ii"][555],
np.array(
[
0.165200000000000,
0.802800000000000,
0.028300000000000,
]
),
decimal=7,
)
np.testing.assert_array_almost_equal(
dataset.load()["cmf"]["cie-1931"][555],
np.array(
[
0.512050100000000,
1.000000000000000,
0.005749999000000,
]
),
decimal=7,
)
np.testing.assert_array_almost_equal(
dataset.load()["illuminant"]["iso7589"][555],
np.array([0.485000000000000]),
decimal=7,
)
np.testing.assert_array_almost_equal(
dataset.load()["training"]["190-patch"][555],
np.array(
[
0.016543747000000,
0.089454049000000,
0.775860114000000,
0.199500000000000,
0.589294177000000,
0.426983879000000,
0.299315241000000,
0.195307174000000,
0.113005514000000,
0.065695622000000,
0.030550537000000,
0.185923210000000,
0.138998782000000,
0.253323493000000,
0.116890395000000,
0.059878320000000,
0.386424591000000,
0.242522104000000,
0.042793898000000,
0.039108407000000,
0.340616303000000,
0.109391839000000,
0.024575114000000,
0.013437553000000,
0.165550372000000,
0.044162979000000,
0.038362653000000,
0.050943800000000,
0.060706606000000,
0.017150009000000,
0.030958883000000,
0.294163695000000,
0.094815764000000,
0.013631268000000,
0.011556292000000,
0.102712966000000,
0.014063110000000,
0.088584881000000,
0.019506551000000,
0.049543471000000,
0.216543615000000,
0.148685793000000,
0.426425448000000,
0.066590491000000,
0.185951857000000,
0.161431933000000,
0.046959872000000,
0.337386898000000,
0.044950244000000,
0.186142255000000,
0.217803413000000,
0.176242473000000,
0.180234723000000,
0.573066803000000,
0.396281106000000,
0.130612404000000,
0.489232284000000,
0.086611731000000,
0.482820917000000,
0.285489705000000,
0.390752390000000,
0.553103082000000,
0.761045838000000,
0.448310405000000,
0.751459057000000,
0.296973364000000,
0.845515046000000,
0.600851468000000,
0.790979892000000,
0.116890676000000,
0.471334928000000,
0.796627165000000,
0.318975867000000,
0.365398300000000,
0.663541772000000,
0.243604910000000,
0.817055901000000,
0.746637464000000,
0.142703616000000,
0.060728679000000,
0.244645070000000,
0.525056690000000,
0.125884506000000,
0.159583709000000,
0.333025306000000,
0.099145922000000,
0.115960832000000,
0.142817663000000,
0.105357260000000,
0.154603755000000,
0.136542750000000,
0.235944300000000,
0.322853029000000,
0.636786365000000,
0.478067566000000,
0.357385246000000,
0.233766382000000,
0.313229098000000,
0.470989753000000,
0.219620176000000,
0.087619811000000,
0.181083141000000,
0.237307524000000,
0.134183724000000,
0.052929690000000,
0.335421880000000,
0.355101839000000,
0.051487691000000,
0.225285679000000,
0.208450311000000,
0.137336941000000,
0.069794973000000,
0.311496347000000,
0.655141187000000,
0.092340917000000,
0.446097178000000,
0.595113151000000,
0.051742762000000,
0.308310085000000,
0.218221361000000,
0.459776672000000,
0.483055996000000,
0.209489271000000,
0.270752508000000,
0.581475704000000,
0.150634167000000,
0.162358582000000,
0.576733107000000,
0.327650514000000,
0.341401404000000,
0.153771821000000,
0.402136399000000,
0.079694635000000,
0.068407983000000,
0.534616880000000,
0.183116936000000,
0.171525933000000,
0.037855717000000,
0.168182056000000,
0.559997393000000,
0.144518923000000,
0.108677750000000,
0.075848465000000,
0.106230967000000,
0.271748990000000,
0.108267178000000,
0.363043033000000,
0.041006456000000,
0.031950058000000,
0.173380906000000,
0.359966187000000,
0.044712750000000,
0.100602091000000,
0.175245406000000,
0.061063126000000,
0.258613296000000,
0.026866789000000,
0.197704679000000,
0.543435154000000,
0.113192419000000,
0.267300817000000,
0.135820481000000,
0.154000795000000,
0.045469997000000,
0.408044588000000,
0.011999794000000,
0.047949059000000,
0.052502489000000,
0.065332167000000,
0.151156617000000,
0.132535937000000,
0.037475628000000,
0.138033009000000,
0.210685187000000,
0.265259355000000,
0.523381186000000,
0.105874515000000,
0.164640208000000,
0.109354860000000,
0.437779019000000,
0.024237616000000,
0.144939306000000,
0.297763330000000,
0.178469229000000,
0.312304014000000,
0.327352013000000,
0.026469427000000,
0.431901773000000,
0.015418874000000,
0.158126080000000,
]
),
decimal=7,
)
class TestBuildDyer2017(unittest.TestCase):
"""
Define :func:`colour_datasets.loaders.dyer2017.build_Dyer2017`
definition unit tests methods.
"""
def test_build_Dyer2017(self):
"""
Test :func:`colour_datasets.loaders.dyer2017.build_Dyer2017`
definition.
"""
self.assertIs(build_Dyer2017(), build_Dyer2017())
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
8e633d8ebb598671323b8487afebce2f6f963568 | 26771494974942f4ab18d2cd8247506c344e1d14 | /1-50/003-v2-longestSubstringWithoutRepeatingCharacters.py | 1b623b33716c725631f7c4fd8f5ea735357865ac | [] | no_license | wangyunpengbio/LeetCode | 9f4c6076e067c5e847d662679483f737d40e8ca5 | cec1fd11fe43177abb2d4236782c0f116e6e8bce | refs/heads/master | 2020-04-29T22:28:25.899420 | 2020-04-03T07:37:26 | 2020-04-03T07:37:26 | 176,448,957 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 485 | py | class Solution:
def lengthOfLongestSubstring(self, s: str) -> int:
        # Sliding-window approach from the reference solution, sliding with two pointers
n = len(s)
myset = set()
ans = 0
i = 0
j = 0
while i < n and j < n:
if s[j] not in myset:
myset.add(s[j])
j = j + 1
ans = max(ans,j-i)
else:
myset.remove(s[i])
i = i + 1
return ans | [
"[email protected]"
] | |
5f23a4262ec4073b1f163b28f7c67f2d5e26d020 | 5a545262f7c053c1cfd1f7984664e3220c745161 | /casper4/griefing_factor_calculator.py | b51af758356363f00bf152f26b683100969d0483 | [
"MIT"
] | permissive | ethereum/research | 2c523e5796cfdb6055e0107dc1768fbf164ecad0 | bb873f8ad0e673803ec6a55be26678e1f99b9ece | refs/heads/master | 2023-09-04T19:11:51.507361 | 2023-08-30T01:52:05 | 2023-08-30T01:52:05 | 42,808,596 | 1,774 | 603 | MIT | 2023-04-21T07:20:21 | 2015-09-20T10:13:12 | Python | UTF-8 | Python | false | false | 2,335 | py | # Computes griefing factors of various parameter sets for Casper the
# Friendly Finality Gadget
# Case 1: <1/3 non-commit (optimal if epsilon participate)
def gf1(x1, x2, x3, x4, x5):
return x2 / x1
# Case 2: censor <1/3 committers (optimal if 1/3 get censored)
def gf2(x1, x2, x3, x4, x5):
return 1.5 * (x1 + x2 / 3) / x2
# Generalized case 2
#k = 0.25
#def gf2(x1, x2, x3, x4, x5):
# return (x1 * k + x2 * k**2) / (x2 * k * (1-k))
# Case 3: <1/3 non-prepare (optimal if epsilon participate)
def gf3(x1, x2, x3, x4, x5):
return x4 / x3
# Case 4: censor <1/3 preparers (optimal if 1/3 get censored)
def gf4(x1, x2, x3, x4, x5):
return 1.5 * (x3 + x4 / 3) / x4
# Case 5: finality preventing 1/3 non-commits
def gf5(x1, x2, x3, x4, x5):
return 2 * (x5 + x2 / 3) / (x5 + x1 + x2 / 3)
# Case 6: censor commits
def gf6(x1, x2, x3, x4, x5):
# Case 6a: 51% participate
return max(1 + x2 / (x5 + x1 + x2 / 2),
# Case 6b: 67% participate
(x5 + x1 + x2 / 3) / (x5 + x2 / 3) / 2)
# Case 7: finality and commit-preventing 1/3 non-prepares
def gf7(x1, x2, x3, x4, x5):
return 2 * (x5 + x4 / 3) / (x5 + x3 + x4 / 3)
gfs = (gf1, gf2, gf3, gf4, gf5, gf6, gf7)
# Get the maximum griefing factor of a set of parameters
def getmax(*args):
return max([f(*args) for f in gfs])
# Get the maximum <50% griefing factor, and enforce a bound
# of MAX_CENSOR_GF on the griefing factor of >50% coalitions
def getmax2(*args):
MAX_CENSOR_GF = 2
if gf2(*args) > MAX_CENSOR_GF or gf4(*args) > MAX_CENSOR_GF or \
gf6(*args) > MAX_CENSOR_GF:
return 999999999999999999
return max(gf1(*args), gf3(*args), gf5(*args), gf7(*args))
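# Quick sanity check (hypothetical inputs, not from the analysis): with
# (x1, x2, x3, x4, x5) = (1, 1, 1, 1, 1), gf1 == 1/1 == 1.0 and
# gf2 == 1.5 * (1 + 1/3) / 1 == 2.0, so getmax() for that point is >= 2.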
# Range to test for each parameter
my_range = [i/12. for i in range(1, 61)]
best_vals = (1, 0, 0, 0, 0)
best_score = 999999999999999999
# print([f(5, 6, 5, 6, 0) for f in gfs])
for x1 in my_range:
for x2 in my_range:
for x3 in my_range:
for x4 in my_range:
o = getmax2(x1, x2, x3, x4, 1)
if o < best_score:
best_score = o
best_vals = (x1, x2, x3, x4, 1)
if o <= 1:
print((x1, x2, x3, x4, 1), [f(x1, x2, x3, x4, 1) for f in gfs])
print('result', best_vals, best_score)
print([f(*best_vals) for f in gfs])
| [
"[email protected]"
] | |
826e890c5538a5e47ee9b6d19b96e2365eb6aab2 | 05caf48bd067c050666026b75686f23d02327378 | /_560.py | de64e26467444d25f8d5aaf3e39947d672b14bd7 | [
"MIT"
] | permissive | elfgzp/Leetcode | 3b6fa307c699fd5a1ba5ea88988c324c33a83eb7 | 964c6574d310a9a6c486bf638487fd2f72b83b3f | refs/heads/master | 2023-08-21T23:11:38.265884 | 2020-10-17T11:55:45 | 2020-10-17T11:55:45 | 168,635,331 | 3 | 0 | MIT | 2023-07-21T03:50:43 | 2019-02-01T03:14:49 | Python | UTF-8 | Python | false | false | 488 | py | class Solution:
def subarraySum(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
res = 0
pre_sum = 0
dic = {0: 1}
for n in nums:
pre_sum += n
if pre_sum - k in dic:
res += dic[pre_sum - k]
dic[pre_sum] = dic.get(pre_sum, 0) + 1
return res
if __name__ == '__main__':
s = Solution()
print(s.subarraySum([1, 1, 1], 2))
| [
"[email protected]"
] | |
0f4025b60d2f552b2859125fbcd22ff802197eb0 | a882ccf759025735f926695d6a5a39937854646a | /c_step16/conf.py | 02c981773f5192a24679711db370473f365f18be | [] | no_license | muzudho/practice-open-cv2 | 5c1534564bcf43c2d8f7a6fb4ee1583bd77337f9 | 55af5cfb37587b08123b404cf8768d83148cb046 | refs/heads/main | 2023-07-08T02:23:22.984816 | 2021-08-10T10:45:01 | 2021-08-10T10:45:01 | 349,864,518 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | """Settings
* Keep the width at roughly 450px or less
    * When pasting into a crieit blog, an image that is too wide gets compressed and stops being a gif animation
* Keep the file size at 2MB or less
    * crieit blog's limit for pasted images
"""
# Grid spacing
GRID_UNIT = 16
# Number of frames for one full turn of the color wheel
PHASE_COUNTS = 24
# Font scale factor
FONT_SCALE = 0.5
| [
"[email protected]"
] | |
803e0e4dcc3f1532c1b2fb227753c3c4ba7c6bde | a2dc75a80398dee58c49fa00759ac99cfefeea36 | /bluebottle/cms/migrations/0033_auto_20171017_1353.py | b08c6860a74195c4ea8fe4b46ee081f05535c972 | [
"BSD-2-Clause"
] | permissive | onepercentclub/bluebottle | e38b0df2218772adf9febb8c6e25a2937889acc0 | 2b5f3562584137c8c9f5392265db1ab8ee8acf75 | refs/heads/master | 2023-08-29T14:01:50.565314 | 2023-08-24T11:18:58 | 2023-08-24T11:18:58 | 13,149,527 | 15 | 9 | BSD-3-Clause | 2023-09-13T10:46:20 | 2013-09-27T12:09:13 | Python | UTF-8 | Python | false | false | 690 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2017-10-17 11:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.manager
class Migration(migrations.Migration):
dependencies = [
('cms', '0032_migrate_projects_3'),
]
operations = [
migrations.AddField(
model_name='slidescontent',
name='sub_title',
field=models.CharField(blank=True, max_length=70, null=True),
),
migrations.AddField(
model_name='slidescontent',
name='title',
field=models.CharField(blank=True, max_length=40, null=True),
),
]
| [
"[email protected]"
] | |
5d375d43bc7c4bc5917a3045e557f480db9b73f0 | 7298d1692c6948f0880e550d6100c63a64ce3ea1 | /catalog-configs/Vocab/ihm_modeling_post_process_feature_term.py | 850ac847e5be209c3b038500354e741d8f1015e4 | [] | no_license | informatics-isi-edu/protein-database | b7684b3d08dbf22c1e7c4a4b8460248c6f0d2c6d | ce4be1bf13e6b1c22f3fccbb513824782609991f | refs/heads/master | 2023-08-16T10:24:10.206574 | 2023-07-25T23:10:42 | 2023-07-25T23:10:42 | 174,095,941 | 2 | 0 | null | 2023-06-16T19:44:43 | 2019-03-06T07:39:14 | Python | UTF-8 | Python | false | false | 6,143 | py | import argparse
from attrdict import AttrDict
from deriva.core import ErmrestCatalog, get_credential, DerivaPathError
from deriva.utils.catalog.components.deriva_model import DerivaCatalog
import deriva.core.ermrest_model as em
from deriva.core.ermrest_config import tag as chaise_tags
from deriva.utils.catalog.manage.update_catalog import CatalogUpdater, parse_args
groups = {
'pdb-admin': 'https://auth.globus.org/0b98092c-3c41-11e9-a8c8-0ee7d80087ee',
'pdb-reader': 'https://auth.globus.org/8875a770-3c40-11e9-a8c8-0ee7d80087ee',
'pdb-writer': 'https://auth.globus.org/c94a1e5c-3c40-11e9-a5d1-0aacc65bfe9a',
'pdb-curator': 'https://auth.globus.org/eef3e02a-3c40-11e9-9276-0edc9bdd56a6',
'isrd-staff': 'https://auth.globus.org/176baec4-ed26-11e5-8e88-22000ab4b42b'
}
table_name = 'ihm_modeling_post_process_feature_term'
schema_name = 'Vocab'
column_annotations = {
'RCT': {
chaise_tags.display: {
'name': 'Creation Time'
},
chaise_tags.generated: None,
chaise_tags.immutable: None
},
'RMT': {
chaise_tags.display: {
'name': 'Last Modified Time'
},
chaise_tags.generated: None,
chaise_tags.immutable: None
},
'RCB': {
chaise_tags.display: {
'name': 'Created By'
},
chaise_tags.generated: None,
chaise_tags.immutable: None
},
'RMB': {
chaise_tags.display: {
'name': 'Modified By'
},
chaise_tags.generated: None,
chaise_tags.immutable: None
},
'ID': {},
'URI': {},
'Name': {},
'Description': {},
'Synonyms': {},
'Owner': {}
}
column_comment = {
'ID': 'The preferred Compact URI (CURIE) for this term.',
'URI': 'The preferred URI for this term.',
'Name': 'The preferred human-readable name for this term.',
'Description': 'A longer human-readable description of this term.',
'Synonyms': 'Alternate human-readable names for this term.',
'Owner': 'Group that can update the record.'
}
column_acls = {}
column_acl_bindings = {}
column_defs = [
em.Column.define(
'ID',
em.builtin_types['ermrest_curie'],
nullok=False,
default='PDB:{RID}',
comment=column_comment['ID'],
),
em.Column.define(
'URI',
em.builtin_types['ermrest_uri'],
nullok=False,
default='/id/{RID}',
comment=column_comment['URI'],
),
em.Column.define(
'Name', em.builtin_types['text'], nullok=False, comment=column_comment['Name'],
),
em.Column.define(
'Description',
em.builtin_types['markdown'],
nullok=False,
comment=column_comment['Description'],
),
em.Column.define('Synonyms', em.builtin_types['text[]'], comment=column_comment['Synonyms'],
),
em.Column.define('Owner', em.builtin_types['text'], comment=column_comment['Owner'],
),
]
visible_columns = {
'*': [
'RID', 'Name', 'Description', 'ID', 'URI',
['Vocab', 'ihm_modeling_post_process_feature_term_RCB_fkey'],
['Vocab', 'ihm_modeling_post_process_feature_term_RMB_fkey'], 'RCT', 'RMT',
['Vocab', 'ihm_modeling_post_process_feature_term_Owner_fkey']
]
}
table_display = {'row_name': {'row_markdown_pattern': '{{{Name}}}'}}
table_annotations = {
chaise_tags.table_display: table_display,
chaise_tags.visible_columns: visible_columns,
}
table_comment = 'A set of controlled vocabular terms.'
table_acls = {}
table_acl_bindings = {
'self_service_group': {
'types': ['update', 'delete'],
'scope_acl': ['*'],
'projection': ['Owner'],
'projection_type': 'acl'
},
'self_service_creator': {
'types': ['update', 'delete'],
'scope_acl': ['*'],
'projection': ['RCB'],
'projection_type': 'acl'
}
}
key_defs = [
em.Key.define(
['RID'], constraint_names=[('Vocab', 'ihm_modeling_post_process_feature_term_RIDkey1')],
),
em.Key.define(
['ID'], constraint_names=[('Vocab', 'ihm_modeling_post_process_feature_term_IDkey1')],
),
em.Key.define(
['URI'], constraint_names=[('Vocab', 'ihm_modeling_post_process_feature_term_URIkey1')],
),
]
fkey_defs = [
em.ForeignKey.define(
['Owner'],
'public',
'Catalog_Group', ['ID'],
constraint_names=[('Vocab', 'ihm_modeling_post_process_feature_term_Owner_fkey')],
acls={
'insert': [groups['pdb-curator']],
'update': [groups['pdb-curator']]
},
acl_bindings={
'set_owner': {
'types': ['update', 'insert'],
'scope_acl': ['*'],
'projection': ['ID'],
'projection_type': 'acl'
}
},
),
em.ForeignKey.define(
['RCB'],
'public',
'ERMrest_Client', ['ID'],
constraint_names=[('Vocab', 'ihm_modeling_post_process_feature_term_RCB_fkey')],
acls={
'insert': ['*'],
'update': ['*']
},
),
em.ForeignKey.define(
['RMB'],
'public',
'ERMrest_Client', ['ID'],
constraint_names=[('Vocab', 'ihm_modeling_post_process_feature_term_RMB_fkey')],
acls={
'insert': ['*'],
'update': ['*']
},
),
]
table_def = em.Table.define(
table_name,
column_defs=column_defs,
key_defs=key_defs,
fkey_defs=fkey_defs,
annotations=table_annotations,
acls=table_acls,
acl_bindings=table_acl_bindings,
comment=table_comment,
provide_system=True
)
def main(catalog, mode, replace=False, really=False):
updater = CatalogUpdater(catalog)
updater.update_table(mode, schema_name, table_def, replace=replace, really=really)
if __name__ == "__main__":
host = 'pdb.isrd.isi.edu'
catalog_id = 5
mode, replace, host, catalog_id = parse_args(host, catalog_id, is_table=True)
catalog = DerivaCatalog(host, catalog_id=catalog_id, validate=False)
main(catalog, mode, replace)
| [
"[email protected]"
] | |
1469b0ca35fbff5011c461f263785d99282f79f6 | 7a2125b1b4712142e7e1cce21f5ffcb14a6033bc | /shh/__main__.py | 664ce9e256b504b70eef92b0c4cac7556be758f5 | [] | no_license | keenhenry/shh | 060127f22bfe37ce7c2f391070184e646e9c82b7 | f4d95dd5341df74195197d8527a4a4e5b0f548b0 | refs/heads/master | 2021-01-17T19:57:01.449859 | 2016-08-07T16:14:25 | 2016-08-07T16:14:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 985 | py | import shh
import time
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument('-p', '--port', default=None, type=int)
parser.add_argument('-k', '--key', default=None, type=str)
parser.add_argument('-s', '--server', action='store_true')
args = parser.parse_args()
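# Example invocation (hypothetical): `python -m shh --server` picks a free
# local port, publishes it as a hidden service, and serves the current
# directory over HTTP at the printed .onion address.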
if args.port is None:
port = shh.utils.find_port()
else:
port = args.port
print('Local port: {}'.format(port))
print('Creating hidden service...')
hidden = shh.HiddenService(port, key_file=args.key)
print('Serving at: ' + hidden.onion)
if args.server:
try:
from socketserver import TCPServer
except ImportError:
from SocketServer import TCPServer
try:
from http.server import SimpleHTTPRequestHandler
    except ImportError:
from SimpleHTTPServer import SimpleHTTPRequestHandler
print('Serving current directory')
server = TCPServer(('', port), SimpleHTTPRequestHandler)
server.serve_forever()
else:
while True:
        time.sleep(1)
| [
"[email protected]"
] | |
3a14bf609ba29095c5139eff1ced4c4fe38640f0 | 9398d8433fdb29ee630a6ee43a07bc36a2adbd88 | /neutronclient/neutron/v2_0/fw/firewallrule.py | e77e96facdb91dac0a91fec5398a6d4c81fa0b36 | [] | no_license | bopopescu/OpenStack_Liberty_Control | ca5a21d0c32c55dc8c517f5c7c9938ce575a4888 | 0f6ec1b4d38c47776fdf8935266bcaef2464af4c | refs/heads/master | 2022-12-03T10:41:53.210667 | 2016-03-29T06:25:58 | 2016-03-29T06:25:58 | 282,089,815 | 0 | 0 | null | 2020-07-24T01:04:15 | 2020-07-24T01:04:14 | null | UTF-8 | Python | false | false | 5,437 | py | # Copyright 2013 Big Switch Networks
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import argparse
from neutronclient._i18n import _
from neutronclient.common import utils
from neutronclient.neutron import v2_0 as neutronv20
class ListFirewallRule(neutronv20.ListCommand):
"""List firewall rules that belong to a given tenant."""
resource = 'firewall_rule'
list_columns = ['id', 'name', 'firewall_policy_id', 'summary', 'enabled']
pagination_support = True
sorting_support = True
def extend_list(self, data, parsed_args):
for d in data:
val = []
if d.get('protocol'):
protocol = d['protocol'].upper()
else:
protocol = 'no-protocol'
val.append(protocol)
if 'source_ip_address' in d and 'source_port' in d:
src = 'source: ' + str(d['source_ip_address']).lower()
src = src + '(' + str(d['source_port']).lower() + ')'
else:
src = 'source: none specified'
val.append(src)
if 'destination_ip_address' in d and 'destination_port' in d:
dst = 'dest: ' + str(d['destination_ip_address']).lower()
dst = dst + '(' + str(d['destination_port']).lower() + ')'
else:
dst = 'dest: none specified'
val.append(dst)
if 'action' in d:
action = d['action']
else:
action = 'no-action'
val.append(action)
d['summary'] = ',\n '.join(val)
class ShowFirewallRule(neutronv20.ShowCommand):
"""Show information of a given firewall rule."""
resource = 'firewall_rule'
class CreateFirewallRule(neutronv20.CreateCommand):
"""Create a firewall rule."""
resource = 'firewall_rule'
def add_known_arguments(self, parser):
parser.add_argument(
'--name',
help=_('Name for the firewall rule.'))
parser.add_argument(
'--description',
help=_('Description for the firewall rule.'))
parser.add_argument(
'--shared',
dest='shared',
action='store_true',
help=_('Set shared to True (default is False).'),
default=argparse.SUPPRESS)
parser.add_argument(
'--ip-version',
type=int, choices=[4, 6], default=4,
help=_('IP version for the firewall rule (default is 4).'))
parser.add_argument(
'--source-ip-address',
help=_('Source IP address or subnet.'))
parser.add_argument(
'--destination-ip-address',
help=_('Destination IP address or subnet.'))
parser.add_argument(
'--source-port',
help=_('Source port (integer in [1, 65535] or range in a:b).'))
parser.add_argument(
'--destination-port',
help=_('Destination port (integer in [1, 65535] or range in '
'a:b).'))
utils.add_boolean_argument(
parser, '--enabled', dest='enabled',
help=_('Whether to enable or disable this rule.'))
parser.add_argument(
'--protocol', choices=['tcp', 'udp', 'icmp', 'any'],
required=True,
help=_('Protocol for the firewall rule.'))
parser.add_argument(
'--action',
required=True,
choices=['allow', 'deny', 'reject'],
help=_('Action for the firewall rule.'))
def args2body(self, parsed_args):
body = {}
neutronv20.update_dict(parsed_args, body,
['name', 'description', 'shared', 'protocol',
'source_ip_address', 'destination_ip_address',
'source_port', 'destination_port',
'action', 'enabled', 'tenant_id',
'ip_version'])
protocol = parsed_args.protocol
if protocol == 'any':
protocol = None
body['protocol'] = protocol
return {self.resource: body}
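# Example CLI usage (hypothetical values; this class backs the
# `firewall-rule-create` subcommand):
#   neutron firewall-rule-create --protocol tcp --action allow \
#       --destination-port 80 --name allow-http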
class UpdateFirewallRule(neutronv20.UpdateCommand):
"""Update a given firewall rule."""
resource = 'firewall_rule'
def add_known_arguments(self, parser):
parser.add_argument(
'--protocol', choices=['tcp', 'udp', 'icmp', 'any'],
required=False,
help=_('Protocol for the firewall rule.'))
def args2body(self, parsed_args):
body = {}
protocol = parsed_args.protocol
if protocol:
if protocol == 'any':
protocol = None
body['protocol'] = protocol
return {self.resource: body}
class DeleteFirewallRule(neutronv20.DeleteCommand):
"""Delete a given firewall rule."""
resource = 'firewall_rule'
| [
"[email protected]"
] | |
0a7fe6879f0410d3164ee9629b9d1a10ae90c8b7 | 9c87f4c9956ccf1ca2e9f75916fad54d7cafa336 | /harbor/db/hmysql.py | 38b9652bc238b6ac067aa84b7e3863d886a68554 | [] | no_license | zbcbcbc/harbor | 21cbaf1dd5c12d2ca5f56ddaa62355d3a3226f52 | 79d65b2b24dffafd425e423dc54c4810497a7613 | refs/heads/master | 2020-05-24T15:24:13.274030 | 2016-09-04T18:17:26 | 2016-09-04T18:17:26 | 67,360,990 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,522 | py | #coding: utf-8
__filename__ = "h_db.py"
__description__ = "harbor project database wrapper module"
__author__ = "Bicheng Zhang"
__copyright__ = "Copyright 2012-2013, The Harbor Project"
__credits__ = "Bicheng Zhang"
__email__ = "[email protected]"
__version__ = "0.6"
__status__ = "Development"
import MySQLdb
from twisted.enterprise import adbapi
from twisted.python import log
from txredis.protocol import Redis
from twisted.internet.protocol import ClientCreator
from zope.interface import Interface, Attribute, implements
DB_DRIVER = "MySQLdb"
DB_ARGS = {
'db':'harbor',
'user':'root',
'passwd':'NPC8803zbc'
}
class IHarborDB(Interface):
    """
    Interface for Harbor database access objects.
    """
    def query(q):
        """
        Run the SQL query ``q``; returns a Deferred firing with the rows.
        """
class HarborDB(object):
def __init__(self):
self.dbpool = adbapi.ConnectionPool(DB_DRIVER, **DB_ARGS)
    def query(self, query):
        """
        Run the given SQL query on the connection pool; returns a Deferred
        firing with the result rows.
        """
        return self.dbpool.runQuery(query)
class ReconnectingConnectionPool(adbapi.ConnectionPool):
    """Reconnecting adbapi connection pool for MySQL
    see
    https://twistedmatrix.com/pipermail/twisted-python/2009-July/0200007.html
    """
    def _runInteraction(self, interaction, *args, **kw):
        try:
            return adbapi.ConnectionPool._runInteraction(self, interaction, *args,
                                                         **kw)
        except MySQLdb.OperationalError, e:
            if e[0] not in (2006, 2013):
                raise
            log.msg("RPC: got error %s, retrying operation" % (e,))
            conn = self.connections.get(self.threadID())
            self.disconnect(conn)
            # try the interaction again
            return adbapi.ConnectionPool._runInteraction(self, interaction, *args,
                                                         **kw)
| [
"[email protected]"
] | |
224c6c23b0e05f14161b6e1183aab61d954fa5b5 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03815/s360092462.py | f1693763337efdfcac142b7361358d714a452a96 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py |
X = int(input())
cnt = X // 11
X -= 11 * cnt
cnt *= 2
sum = 0
y = 5 if cnt % 2 == 1 else 6
while sum < X:
sum += y
y = 6 if y == 5 else 5
cnt += 1
print(cnt) | [
"[email protected]"
] | |
462a6089b1c31d50bcbbe67b1cf880684fbe42f5 | cc08f8eb47ef92839ba1cc0d04a7f6be6c06bd45 | /Anypa/venv/bin/pip3.7 | 18595d517de33eee50345b4e1e2eab66e7e020f6 | [] | no_license | ProsenjitKumar/PycharmProjects | d90d0e7c2f4adc84e861c12a3fcb9174f15cde17 | 285692394581441ce7b706afa3b7af9e995f1c55 | refs/heads/master | 2022-12-13T01:09:55.408985 | 2019-05-08T02:21:47 | 2019-05-08T02:21:47 | 181,052,978 | 1 | 1 | null | 2022-12-08T02:31:17 | 2019-04-12T17:21:59 | null | UTF-8 | Python | false | false | 255 | 7 | #!/home/prosenjit/PycharmProjects/Anypa/venv/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | |
437f70061a056d6b49044f40bfba41ab5068f726 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02946/s592464084.py | 4ce703f0b4b9b0ea601b9b3a100e623de8ea285e | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 83 | py | k, x = map(int, input().split())
print(' '.join(list(map(str, range(x-k+1, x+k))))) | [
"[email protected]"
] | |
c39006b1275d87024fe9d99fbb246ea4b6a57844 | e6476f18faef8210189c5bc6097a0a108265173c | /quadpy/nsimplex/walkington.py | 457a6c5bce2e410fcb09d242cb2da974b6f23349 | [
"MIT"
] | permissive | acimpoeru/quadpy | 4d96ed6fc20fd53148508f8a4a9b657a5d30269d | 0261efd68e4094af31ee7a82c8099f0d88846d5a | refs/heads/master | 2021-04-12T12:15:22.899532 | 2018-02-14T15:59:27 | 2018-02-14T15:59:27 | 126,213,721 | 0 | 1 | MIT | 2018-03-21T17:07:29 | 2018-03-21T17:07:27 | null | UTF-8 | Python | false | false | 4,440 | py | # -*- coding: utf-8 -*-
#
from __future__ import division
from math import factorial
import numpy
import sympy
from ..helpers import untangle
class Walkington(object):
'''
Noel J. Walkington,
Quadrature on simplices of arbitrary dimension,
Technical Report,
CMU, 2000,
<http://www.math.cmu.edu/~nw0z/publications/00-CNA-023/023abs/>.
'''
def __init__(self, d, index, symbolic=False):
frac = sympy.Rational if symbolic else lambda x, y: x/y
sqrt = numpy.vectorize(sympy.sqrt) if symbolic else numpy.sqrt
self.name = 'Walkington({})'.format(index)
self.dim = d
if index == 1:
self.degree = 1
data = [(frac(1, factorial(d)), _c(d, frac))]
elif index == 2:
# The article claims order 2, but tests really only show order 1.
# Also, the article says:
#
# > The points are inside the simplex when the positive square root
# > is selected.
#
# Not sure what this means, but for d>=2, the points are outside
# the simplex.
self.degree = 1
data = [
(frac(1, factorial(d+1)), _xi1(d, 1/sqrt(d+1)))
]
elif index == 3:
self.degree = 3
data = [
(frac(-(d+1)**3, 4 * factorial(d+2)), _c(d, frac)),
(frac(+(d+3)**3, 4 * factorial(d+3)), _xi1(d, frac(1, (d+3)))),
]
elif index == 5:
self.degree = 5
w0 = frac(+(d+1)**5, 32 * factorial(d+3))
w1 = frac(-(d+3)**5, 16 * factorial(d+4))
w2 = frac(+(d+5)**5, 16 * factorial(d+5))
data = [
(w0, _c(d, frac)),
(w1, _xi1(d, frac(1, d+3))),
(w2, _xi1(d, frac(1, d+5))),
(w2, _xi11(d, frac(1, d+5), frac)),
]
else:
assert index == 7
self.degree = 7
w0 = -frac(1, 384) * frac((d+1)**7, factorial(d+4))
w1 = +frac(1, 128) * frac((d+3)**7, factorial(d+5))
w2 = -frac(1, 64) * frac((d+5)**7, factorial(d+6))
w3 = +frac(1, 64) * frac((d+7)**7, factorial(d+7))
data = [
(w0, _c(d, frac)),
(w1, _xi1(d, frac(1, d+3))),
(w2, _xi1(d, frac(1, d+5))),
(w2, _xi11(d, frac(1, d+5), frac)),
(w3, _xi1(d, frac(1, d+7))),
(w3, _xi21(d, frac(1, d+7), frac)),
(w3, _xi111(d, frac(1, d+7), frac)),
]
self.bary, self.weights = untangle(data)
self.points = self.bary[:, 1:]
# normalize weights
self.weights /= numpy.sum(self.weights)
return
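# Example (a sketch; exact integration helpers depend on the quadpy API):
#   scheme = Walkington(3, 5)   # degree-5 scheme on the tetrahedron
#   # scheme.points / scheme.weights can then be fed to a generic simplex
#   # quadrature; the weights are normalized above to sum to 1.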
def _c(d, frac):
return numpy.array([
numpy.full(d+1, frac(1, d+1))
])
def _xi1(d, a):
out = numpy.full((d+1, d+1), a)
b = 1 - d*a
numpy.fill_diagonal(out, b)
return out
def _xi11(d, a, frac):
assert d > 1
b = frac(1 - (d-1) * a, 2)
if d == 2:
out = numpy.array([
[b, b, a],
[b, a, b],
[a, b, b],
])
else:
assert d == 3
out = numpy.array([
[b, b, a, a],
[b, a, b, a],
[b, a, a, b],
[a, b, a, b],
[a, a, b, b],
[a, b, b, a],
])
return out
def _xi21(d, a, frac):
assert d > 1
b = frac(1 - (d-2) * a, 3)
    # ERR Note that the article wrongly states (d-2) in the expression for c.
c = 1 - (d-1) * a - b
if d == 2:
out = numpy.array([
[b, c, a],
[c, b, a],
[c, a, b],
[b, a, c],
[a, b, c],
[a, c, b],
])
else:
assert d == 3
out = numpy.array([
[b, c, a, a],
[b, a, c, a],
[b, a, a, c],
[a, b, a, c],
[a, a, b, c],
[a, b, c, a],
[c, b, a, a],
[c, a, b, a],
[c, a, a, b],
[a, c, a, b],
[a, a, c, b],
[a, c, b, a],
])
return out
def _xi111(d, a, frac):
assert d == 3
b = frac(1 - (d-2) * a, 3)
out = numpy.array([
[b, b, b, a],
[b, b, a, b],
[b, a, b, b],
[a, b, b, b],
])
return out
| [
"[email protected]"
] | |
7b2fd7df1cb13035e43f24f46fef589ad5e91ab3 | 9d1238fb0e4a395d49a7b8ff745f21476c9d9c00 | /framework/Tests/PAS/PAS/GeneralSecrets/SecretsV2Folders/MemberPermissions/API/test_member_add_single_folder_then_append_multilevel_folder_to_it.py | 46fe3f13b3d24f03c4ad6e4479b9cd1613e66fe3 | [] | no_license | jaspalsingh92/TestAutomation-1 | a48ee1d3b73386f1bf8f53328a5b55444238e054 | e631c67255b10f150e0012991fb1474ede904417 | refs/heads/master | 2023-04-18T14:52:08.836221 | 2021-04-07T12:01:07 | 2021-04-07T12:01:07 | 357,175,690 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,848 | py | import pytest
import logging
from Shared.API.secret import create_folder, get_folder, get_secrets_and_folders_in_folders,\
give_user_permissions_to_folder
from Shared.API.sets import SetsManager
logger = logging.getLogger('test')
@pytest.mark.api
@pytest.mark.pas
@pytest.mark.pasapi
@pytest.mark.bhavna
def test_member_add_single_folder_then_append_multilevel_folder_to_it(core_session,
pas_general_secrets,
cleanup_secrets_and_folders,
users_and_roles,
create_secret_folder):
"""
C3059: test method to Add single folder then append multilevel folder to it
1) create multilevel folder dogs/labradors/yellow inside a parent folder
2) Login as Admin, set folder permissions "View" for parent folder
3) Login as pas user
4) verify pas user can view all folders i.e. "folder1/dogs/labradors/yellow" & inherit permissions from parent.
:param core_session: Authenticated Centrify session
:param pas_general_secrets: Fixture to read secrets related data from yaml file
:param cleanup_secrets_and_folders: Fixture to cleanup the secrets & folders created
:param users_and_roles: Fixture to create random user with PAS User Rights
:param create_secret_folder: Fixture to create folder & yields folder details
"""
params = pas_general_secrets
folders_list = cleanup_secrets_and_folders[1]
folder_parameters = create_secret_folder
parent_folder_id = folder_parameters['ID']
# creating multilevel folder dogs/labradors/yellow
child_folder_success, child_folder_parameters, child_folder_id = create_folder(
core_session,
params['folder_multiple_level'],
params['description'],
parent=parent_folder_id)
assert child_folder_success, f'Failed to create multilevel folder, API response result: {child_folder_id}'
logger.info(f'Multilevel Folder created successfully, details are: {child_folder_parameters}')
# Getting details of Folder Labradors
labradors_folder = get_folder(core_session, child_folder_id)
logger.info(f'labradors folder details:{labradors_folder}')
labradors_folder_id = labradors_folder['Result']['Results'][0]['Row']['Parent']
# Getting id of Folder Dogs
dogs_folder = get_folder(core_session, labradors_folder_id)
logger.info(f'Dogs folder details:{dogs_folder}')
dogs_folder_id = dogs_folder['Result']['Results'][0]['Row']['Parent']
# API to get new session for User A
pas_power_user_session = users_and_roles.get_session_for_user('Privileged Access Service User')
assert pas_power_user_session.auth_details, 'Failed to Login with PAS User'
user_name = pas_power_user_session.auth_details['User']
user_id = pas_power_user_session.auth_details['UserId']
logger.info(f'User with PAS User Rights login successfully: user_Name:{user_name}')
# Api to give user permissions to parent folder
user_permissions_alpha = give_user_permissions_to_folder(core_session,
user_name,
user_id,
parent_folder_id,
'View')
assert user_permissions_alpha['success'], \
f'Not Able to set user permissions to folder, API response result:{user_permissions_alpha["Result"]}'
logger.info(f'User Permissions to folder: {user_permissions_alpha}')
# Getting id of Folder Dog
dog_folder = get_secrets_and_folders_in_folders(pas_power_user_session, parent_folder_id)
logger.info(f'Details of Dog Folder Retrieved with pas user:{dog_folder}')
dog_id = dog_folder["Result"]["Results"][0]["Entities"][0]["Key"]
assert dog_id == dogs_folder_id, \
f'Failed to view dog folder with pas user, API response result:{dog_folder["success"]} & {dog_folder["Result"]}'
# Getting id of parent folder
labradors_folder = get_secrets_and_folders_in_folders(pas_power_user_session, dog_id)
logger.info(f'Details of labradors Folder Retrieved with pas user:{labradors_folder}')
labradors_id = labradors_folder["Result"]["Results"][0]["Entities"][0]["Key"]
assert labradors_id == labradors_folder_id, \
f'Failed to view labradors folder with pas user, API response result:' \
f'{labradors_folder["success"]} & {labradors_folder["Result"]}'
# Getting id of parent folder
yellow_folder = get_secrets_and_folders_in_folders(pas_power_user_session, labradors_id)
logger.info(f'Details of yellow Folder Retrieved with pas user:{yellow_folder}')
yellow_id = yellow_folder["Result"]["Results"][0]["Entities"][0]["Key"]
assert \
yellow_id == child_folder_id, f' Failed to view yellow folder with pas user, API response result:' \
f'{yellow_folder["success"]} & {yellow_folder["Result"]}'
    # Getting permissions of the folder yellow (should inherit from parent)
permissions_yellow = SetsManager.get_collection_rights(pas_power_user_session, child_folder_id)
verify_permissions_all = 'View'
assert verify_permissions_all == permissions_yellow["Result"], \
f'Failed to verify permissions for the folder, API response result:{permissions_yellow["Result"]}'
logger.info(f'Permissions of the folder created: {permissions_yellow}')
# cleanup of folders accordingly
folders_list.insert(0, child_folder_id)
folders_list.insert(1, labradors_folder_id)
folders_list.insert(2, dogs_folder_id)
| [
"[email protected]"
] | |
bbbb3283bf2aa13ee7af8d20d65b760414fc42f7 | 08e2ed7fb3a3080c8cdc46cf7e4cbb2a6e60f90a | /src/game_object/components/image_component.py | 20c402c520e23640a7a37d13e1d5c3a1f98ea467 | [] | no_license | thydungeonsean/_rainbowmancer | 1630b60983719dde77cd1dea267dd15dde855c38 | cebaf66f5c69f60f8b6c38492f19b8f1e32f73fe | refs/heads/master | 2021-04-28T07:35:06.183408 | 2018-03-19T19:55:47 | 2018-03-19T19:55:47 | 122,226,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,271 | py | from game_object_component import GameObjectComponent
from src.data_structures.vector import Vector
from src.image.tile_image import TileImage
class ImageComponent(GameObjectComponent):
A = 0
B = 1
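    # Two pre-rendered frames (A and B) are swapped according to the global
    # game-state frame counter to get a simple two-frame tile animation.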
def __init__(self, owner, image_id, animated=True):
GameObjectComponent.__init__(self, owner)
self.image_id = image_id
self.images = self.load_images(animated)
@property
def frame(self):
return self.game_state.frame
@property
def color_id(self):
return self.images[0].color_id
def load_images(self, animated):
images = {
ImageComponent.A: TileImage(self.image_id),
ImageComponent.B: TileImage(self.image_id, animated_frame=animated)
}
return images
def position(self, (x, y)):
for i in (0, 1):
self.images[i].position((x, y))
def draw(self, surface):
image = self.images[self.frame]
image.draw(surface)
def change_color(self, new_color):
if new_color != self.color_id:
for i in (0, 1):
self.images[i].change_color(new_color)
@property
def w(self):
return self.images[0].w
@property
def h(self):
return self.images[0].h
| [
"[email protected]"
] | |
6295e3e515ae5835cbede861390b080d25f8b017 | a3a898a42049da56bbda00adf4cd781f4ffcce5f | /ut.py | 661e7e7207fdf62cf6f932036333775b0dad9df4 | [] | no_license | zhanglintc/algorithm | 682b282b952a4db393c5f2aecaf9d3e7c792d635 | 5bf955aa62ca7728a26fc0613940839121876f29 | refs/heads/main | 2023-03-21T20:00:03.989144 | 2021-03-24T10:03:35 | 2021-03-24T10:03:35 | 349,449,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,969 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# -*- mode: python -*-
# vi: set ft=python :
import unittest
class BaseTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
print('######################################################################')
print("{0}:".format(cls.__name__))
class Base64Test(BaseTestCase):
def test_base64(self):
import base64 as base64
import b64.b64 as b64
# encode
for c in 'AZaz09+-':
self.assertEqual(base64.b64encode(c.encode()), b64.base64_encode(c.encode()))
self.assertEqual(base64.b64encode(b'Man'), b64.base64_encode(b'Man'))
self.assertEqual(base64.b64encode(b'any carnal pleasure.'), b64.base64_encode(b'any carnal pleasure.'))
# decode
self.assertEqual(base64.b64decode('QQ=='), b64.base64_decode('QQ=='))
self.assertEqual(base64.b64decode('TWFu'), b64.base64_decode('TWFu'))
class AlgorithmSortTest(BaseTestCase):
def __new__(cls, *args, **kwargs):
import sort.sort as _sort
cls._sort = _sort
import random
cls._array = [random.randint(0, 100000) for _ in range(3000)]
        instance = super().__new__(cls)
return instance
def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
# @unittest.skip('pass')
def sort_test_helper(self, func):
print()
pairs = [
([], []),
([1], [1]),
([x for x in range(1000, -1, -1)], [x for x in range(1000, -1, -1)]),
(self._array[:], self._array[:]),
]
for pair in pairs:
by_testee, by_system = pair
func(by_testee); by_system.sort()
self.assertEqual(by_testee, by_system)
def test_swap_sort(self):
self.sort_test_helper(self._sort.swap_sort)
def test_bubble_sort(self):
self.sort_test_helper(self._sort.bubble_sort)
def test_selection_sort(self):
self.sort_test_helper(self._sort.selection_sort)
def test_insertion_sort(self):
self.sort_test_helper(self._sort.insertion_sort)
def test_shell_sort(self):
self.sort_test_helper(self._sort.shell_sort)
def test_heap_sort(self):
self.sort_test_helper(self._sort.heap_sort)
def test_merge_sort(self):
self.sort_test_helper(self._sort.merge_sort)
def test_quick_sort(self):
self.sort_test_helper(self._sort.quick_sort)
def test_counting_sort(self):
self.sort_test_helper(self._sort.counting_sort)
self.sort_test_helper(self._sort.counting_sort_stable)
if __name__ == '__main__':
runner = unittest.TextTestRunner(verbosity=2)
load_class = unittest.TestLoader().loadTestsFromTestCase
suites = [
load_class(Base64Test),
load_class(AlgorithmSortTest),
]
result = [runner.run(suite) for suite in suites]
list(map(print, result))
| [
"[email protected]"
] | |
1d4be0626cf2e87afbb3890b9c2b4fdd4d4312e2 | a2b6bc9bdd2bdbe5871edb613065dd2397175cb3 | /简单/二叉搜索树中的众数.py | e059cf51f0fc83293f2a71e5defab5ed13b51d62 | [] | no_license | Asunqingwen/LeetCode | ed8d2043a31f86e9e256123439388d7d223269be | b7c59c826bcd17cb1333571eb9f13f5c2b89b4ee | refs/heads/master | 2022-09-26T01:46:59.790316 | 2022-09-01T08:20:37 | 2022-09-01T08:20:37 | 95,668,066 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,407 | py | '''
Given a binary search tree (BST) that may contain duplicate values, find all of the mode(s) (the most frequently occurring elements) in it.
Assume the BST is defined as follows:
The values in a node's left subtree are less than or equal to the node's value
The values in a node's right subtree are greater than or equal to the node's value
Both the left and right subtrees are themselves binary search trees
For example:
Given the BST [1,null,2,2],
   1
    \
     2
    /
   2
return [2].
Note: if there is more than one mode, the output order does not matter
Follow-up: can you solve it without extra space? (Assume the implicit call stack from recursion does not count.)
'''
from typing import List
from Tree import TreeNode, stringToTreeNode
from collections import defaultdict
class Solution:
def findMode(self, root: TreeNode) -> List[int]:
res = []
if not root:
return res
hashMap = defaultdict(int)
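        # A hash map costs O(n) extra space; the follow-up's O(1)-space
        # variant would instead track value runs during the in-order walk.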
def dfs(node):
if not node:
return
if node.left:
dfs(node.left)
hashMap[node.val] += 1
if node.right:
dfs(node.right)
dfs(root)
max_count = max(hashMap.values())
for k, v in hashMap.items():
if v == max_count:
res.append(k)
return res
if __name__ == '__main__':
nums = "2147483647"
root = stringToTreeNode(nums)
sol = Solution()
print(sol.findMode(root))
| [
"[email protected]"
] | |
180b6a0cc640d56c92a9f96dbc23bfee1f9c0179 | 05e67dc917da938b5b6857d7f21c3b87f939de91 | /Lib/console.py | 1dc17b587bc978eeaa7b8ee00704afc8158132af | [
"MIT"
] | permissive | Aareon/Pyto | a117c7d5634698d1f0d44c9771c9e85870effa70 | 46af1ebf4f54fd90eacc3bb548f8717b4bd55bbe | refs/heads/main | 2023-05-19T14:49:18.217913 | 2021-06-08T03:54:17 | 2021-06-08T03:54:17 | 349,201,563 | 1 | 0 | MIT | 2021-06-12T04:01:16 | 2021-03-18T19:58:31 | null | UTF-8 | Python | false | false | 23,631 | py | # -*- coding: utf-8 -*-
"""
Some Pyto core functions
"""
# (!) Warning (!)
# The following code is horrible
# Protect your eyes
# Good luck
import os
from pyto import *
from pyto import __isMainApp__, __Class__
import sys
import traceback
import threading
import time
from extensionsimporter import __UpgradeException__
if "widget" not in os.environ:
from code import interact, InteractiveConsole
import importlib.util
from importlib import reload
import builtins
import pdb
from colorama import Fore, Back, Style
from json import dumps
from collections.abc import Mapping
from types import ModuleType as Module
import stopit
import asyncio
import gc
from time import sleep
from rubicon.objc import ObjCInstance
from Foundation import NSObject
import warnings
try:
from rubicon.objc import *
except ValueError:
def ObjCClass(class_name):
return None
try:
import pyto_core as pc
except ImportError:
pass
def displayhook(_value):
if _value is None:
return
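    # represent() flattens the value into a JSON-friendly tree of reprs so
    # the app can render nested attributes alongside the plain repr below.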
def represent(value):
if hasattr(value, "__dict__"):
val = {}
_dict = dict(value.__class__.__dict__)
_dict.update(dict(value.__dict__))
for key in list(_dict):
item = _dict[key]
if item is value or (
not isinstance(item, dict) and not isinstance(item, list)
):
item = repr(item)
else:
item = represent(item)
val[key] = item
elif isinstance(value, Mapping):
val = {}
_dict = value
for key in list(_dict):
item = _dict[key]
if item is value or (
not isinstance(item, dict) and not isinstance(item, list)
):
item = repr(item)
else:
item = represent(item)
val[repr(key)] = item
elif isinstance(value, list):
val = {}
i = 0
for item in value:
_item = item
if item is value or (
not isinstance(item, dict) and not isinstance(item, list)
):
val[str(i)] = repr(item)
else:
val[str(i)] = represent(item)
i += 1
else:
val = repr(value)
return val
_repr = repr(_value)
if isinstance(_value, tuple):
_value = list(_value)
if isinstance(
_value, ObjCInstance
): # We assume it's an instance or subclass of NSObject
_repr = str(_value.debugDescription)
val = {
"Description": _repr,
"Superclass": str(_value.superclass.name),
"Methods": str(_value._methodDescription()),
}
elif (
isinstance(_value, Mapping)
or isinstance(_value, list)
or hasattr(_value, "__dict__")
):
val = represent(_value)
else:
val = represent([_value])
def default(o):
return repr(o)
json = dumps(val, default=default)
try:
PyOutputHelper.printValue(
_repr + "\n", value=json, script=threading.current_thread().script_path
)
except AttributeError:
PyOutputHelper.printValue(_repr + "\n", value=json, script=None)
def excepthook(exc, value, tb, limit=None):
if isinstance(value, __UpgradeException__):
builtins.print(value)
return
message = traceback.format_exception(exc, value, tb, limit=limit)
if limit is None:
del message[1] # Remove the first element of the traceback in the REPL
for part in message:
if part == message[0]: # Traceback (most recent blah blah blah)
msg = Fore.RED + part + Style.RESET_ALL
elif part == message[-1]: # Exception: message
parts = part.split(":")
parts[0] = Fore.RED + parts[0] + Style.RESET_ALL
msg = ":".join(parts)
elif part.startswith(" File"): # File "file", line 1, in function
parts = part.split('"')
parts[1] = Fore.YELLOW + parts[1] + Style.RESET_ALL
parts = '"'.join(parts).split("\n")
first_line = parts[0].split(" ")
first_line[-1] = Fore.YELLOW + first_line[-1] + Style.RESET_ALL
parts[0] = " ".join(first_line)
msg = "\n".join(parts)
else:
msg = part
builtins.print(msg, file=sys.stderr, end="")
__repl_namespace__ = {}
__repl_threads__ = {}
def __runREPL__(repl_name="", namespace={}, banner=None):
if "widget" in os.environ:
return
sys.excepthook = excepthook
sys.displayhook = displayhook
__repl_namespace__[repl_name] = {
"clear": ClearREPL(),
"__name__": repl_name.split(".")[0],
"__file__": repl_name,
}
__repl_namespace__[repl_name].update(namespace)
__namespace__ = __repl_namespace__[repl_name]
def read(prompt):
import console
__namespace__.update(console.__repl_namespace__[repl_name])
return input(prompt, highlight=True)
sys.argv = [""]
Python.shared.isScriptRunning = True
if banner is None:
banner = f"Python {sys.version}\n{str(__Class__('MenuTableViewController').pytoVersion)}\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\nType \"clear()\" to clear the console."
interact(readfunc=read, local=__namespace__, banner=banner)
# MARK: - Running
def __clear_mods__():
try:
del sys.modules["pip"]
except KeyError:
pass
try:
del sys.modules["pdb"]
except KeyError:
pass
try:
del sys.modules["logging"]
del sys.modules["logging.config"]
del sys.modules["logging.handlers"]
except KeyError:
pass
try:
del sys.modules["pyto_ui"]
except KeyError:
pass
try:
del sys.modules["pyto_core"]
except KeyError:
pass
try:
del sys.modules["ui_constants"]
except KeyError:
pass
try:
del sys.modules["watch"]
except KeyError:
pass
try:
del sys.modules["widgets"]
except KeyError:
pass
try:
del sys.modules["turtle"]
except KeyError:
pass
try:
_values = sys.modules["_values"]
for attr in dir(_values):
if attr not in _values._dir:
delattr(_values, attr)
except:
pass
try:
if "matplotlib" in sys.modules:
import matplotlib.pyplot as plt
plt.close()
plt.clf()
except:
pass
keys = list(sys.modules.keys())
for key in keys:
try:
mod = sys.modules[key]
if (
os.access(mod.__file__, os.W_OK)
and not "/Library/python38" in mod.__file__
and key != "<run_path>"
):
del sys.modules[key]
except AttributeError:
pass
except TypeError:
pass
except KeyError:
pass
__widget_id__ = None
if "widget" not in os.environ:
__script__ = None
__is_loop_running__ = False
__i__ = 0
__breakpoints__ = []
__are_breakpoints_set__ = True
def run_script(path, replMode=False, debug=False, breakpoints=[], runREPL=True):
"""
    Run the script at the given path, catching exceptions.
This function should only be used internally by Pyto.
Args:
path: The path of the script.
replMode: If set to `True`, errors will not be handled.
debug: Set to `True` for debugging.
breakpoints: Lines to break if debugging.
runREPL: Set it to `True` for running the REPL.
"""
__repl_namespace__[path.split("/")[-1]] = {}
__clear_mods__()
python = Python.shared
python.addScriptToList(path)
if PyCallbackHelper is not None:
PyCallbackHelper.exception = None
is_watch_script = False
if path == str(Python.watchScriptURL.path):
is_watch_script = True
currentDir = ""
try:
currentDir = str(python.currentWorkingDirectory)
except:
currentDir = os.path.expanduser(os.path.dirname(path))
try:
del os.environ["ps1"]
except KeyError:
pass
try:
del os.environ["ps2"]
except KeyError:
pass
sys.argv = [path]
for arg in python.args:
if arg != "":
sys.argv.append(str(arg))
d = os.path.expanduser("~/tmp")
filesToRemove = []
try:
filesToRemove = [os.path.join(d, f) for f in os.listdir(d)]
except:
pass
try:
filesToRemove.remove(d + "/Script.py")
except:
pass
try:
filesToRemove.remove(d + "/Watch.py")
except:
pass
for f in filesToRemove:
if f.endswith(".repl.py"):
continue
if f.endswith(".tmp"):
continue
try:
os.remove(f)
except PermissionError:
pass
# Kill the REPL running for this script
global __repl_threads__
if path in __repl_threads__:
Python.shared.interruptInputWithScript(path)
thread = __repl_threads__[path]
for tid, tobj in threading._active.items():
if tobj is thread:
try:
stopit.async_raise(tid, SystemExit)
break
except:
pass
del __repl_threads__[path]
def run():
def add_signal_handler(s, f):
return
loop = asyncio.new_event_loop()
loop.add_signal_handler = add_signal_handler
asyncio.set_event_loop(loop)
pip_directory = os.path.expanduser("~/Documents/site-packages")
Python.shared.isScriptRunning = True
os.chdir(currentDir)
try:
sys.path.remove(pip_directory)
except:
pass
sys.path.insert(-1, currentDir)
sys.path.insert(-1, pip_directory)
try:
global __script__
spec = importlib.util.spec_from_file_location("__main__", path)
__script__ = importlib.util.module_from_spec(spec)
sys.modules["__main__"] = __script__
if debug and "widget" not in os.environ:
try:
console
except:
import console
console.__are_breakpoints_set__ = False
console.__breakpoints__ = breakpoints
console.__i__ = -1
old_input = input
def debugger_input(prompt):
try:
console
except:
import console
if not console.__are_breakpoints_set__:
breakpoints = console.__breakpoints__
console.__i__ += 1
if len(breakpoints) < console.__i__:
console.__are_breakpoints_set__ = True
return ""
try:
breakpoints[console.__i__ + 1]
except:
console.__are_breakpoints_set__ = True
return "b " + str(breakpoints[console.__i__])
else:
console.__should_inspect__ = True
return old_input(prompt)
if len(breakpoints) > 0:
builtins.input = debugger_input
pdb.main(["pdb", path])
builtins.input = old_input
loop.close()
else:
spec.loader.exec_module(__script__)
loop.close()
return (path, vars(__script__), None)
except SystemExit:
if PyCallbackHelper is not None:
PyCallbackHelper.cancelled = True
loop.close()
return (path, vars(__script__), SystemExit)
except KeyboardInterrupt:
if PyCallbackHelper is not None:
PyCallbackHelper.cancelled = True
loop.close()
return (path, vars(__script__), KeyboardInterrupt)
except Exception as e:
if PyCallbackHelper is not None:
PyCallbackHelper.exception = str(e)
loop.close()
if not __isMainApp__() or replMode:
print(traceback.format_exc())
if not replMode:
Python.shared.fatalError(traceback.format_exc())
else:
exc_type, exc_obj, exc_tb = sys.exc_info()
extracts = traceback.extract_tb(exc_tb)
count = len(extracts)
lineNumber = -1
fileName = path
for i, extract in enumerate(extracts):
if extract[0] == fileName:
lineNumber = extract[1]
break
count -= 1
if (
type(e) == SyntaxError
): # The last word in a `SyntaxError` exception is the line number
lineNumber = [
int(s) for s in (str(e)[:-1]).split() if s.isdigit()
][-1]
Python.shared.errorType = exc_type.__name__
Python.shared.errorReason = str(e)
for console in ConsoleViewController.objcVisibles:
if (
console.editorSplitViewController.editor.document.fileURL.path
!= path
):
continue
console.editorSplitViewController.editor.showErrorAtLine(
lineNumber
)
excepthook(exc_type, exc_obj, exc_tb, -count)
try:
PyOutputHelper.printError(
"", script=threading.current_thread().script_path
)
except AttributeError:
PyOutputHelper.printError("", script=None)
error = traceback.format_exc(limit=-count)
if "cv2.error" in error and "!_src.empty()" in error:
string = "\nOn Pyto, 'cv2.VideoCapture.read' may return an invalid value the first time. If you are running a loop for capturing a video from the camera, check if the return value is valid before using it. See the 'OpenCV/face_detection.py' example.\n"
try:
PyOutputHelper.printError(
string, script=threading.current_thread().script_path
)
except AttributeError:
PyOutputHelper.printError(string, script=None)
sys.path.remove(currentDir)
if debug:
pdb.post_mortem(exc_tb)
return (path, vars(__script__), e)
if __isMainApp__():
EditorViewController.runningLine = 0
ConsoleViewController.enableDoneButton()
ReviewHelper.shared.launches = ReviewHelper.shared.launches + 1
ReviewHelper.shared.requestReview()
Python.shared.isScriptRunning = True
Python.shared._isScriptRunning = True
def run_repl(t):
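# Hands the finished script's globals to an interactive REPL so its
# final state can be inspected; skipped for REPL scripts, watch
# scripts, and when runREPL is False.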
global __repl_threads__
Python.shared._isScriptRunning = False
Python.shared.isScriptRunning = False
Python.shared.removeScriptFromList(path)
if path.endswith(".repl.py") or not runREPL:
return
if type(t) is tuple and len(t) == 3 and not is_watch_script:
__repl_threads__[t[0]] = threading.current_thread()
__runREPL__(t[0].split("/")[-1], t[1], "")
_script = None
if (
"__editor_delegate__" in dir(builtins)
and builtins.__editor_delegate__ is not None
):
delegate = builtins.__editor_delegate__
def _run():
import builtins
delegate = builtins.__editor_delegate__
t = run()
if type(t) is tuple and len(t) == 3:
try:
delegate.did_run_script(t[0], t[1], t[2])
except NotImplementedError:
run_repl(t)
except SystemExit:
run_repl(t)
except KeyboardInterrupt:
run_repl(t)
except Exception:
traceback.print_exc()
try:
delegate.run_script(path, _run)
except NotImplementedError:
run_repl(_run())
else:
# Return the script's __dict__ for the Xcode template
t = run()
if Python.shared.tooMuchUsedMemory:
del t
elif __isMainApp__():
run_repl(t)
else:
_script = t[1]
Python.shared._isScriptRunning = False
Python.shared.isScriptRunning = False
Python.shared.removeScriptFromList(path)
sys.path = list(dict.fromkeys(sys.path))  # Deduplicate entries while preserving order.
if "widget" not in os.environ:
import watch
watch.__show_ui_if_needed__()
__clear_mods__()
# time.sleep(0.2)
if Python.shared.tooMuchUsedMemory:
Python.shared.runBlankScript()
return _script
# MARK: - I/O
ignoredThreads = []
"""
All output and input requests from these threads will be ignored.
"""
class ClearREPL:
def __repr__(self):
return self.__str__()
def __str__(self):
return "Type 'clear()' to clear the console."
def __call__(self):
print(u"{}[2J{}[;H".format(chr(27), chr(27)), end="")
print(chr(27) + "[3J", end="")
def clear():
"""
Clears the console.
"""
if threading.current_thread() in ignoredThreads:
return
print(u"{}[2J{}[;H".format(chr(27), chr(27)), end="")
print(chr(27) + "[3J", end="")
msg = "'clear()' was deprecated in Pyto 16.1 since the terminal supports more escape sequences. You should just print the adequate escape sequences to clear the terminal."
warnings.warn(msg, DeprecationWarning)
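# Non-deprecated equivalent using plain ANSI escape sequences:
#
#     print("\x1b[2J\x1b[;H", end="")  # clear screen, cursor to home
#     print("\x1b[3J", end="")         # also drop the scrollback buffer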
__PyInputHelper__ = PyInputHelper
def input(prompt: str = None, highlight=False):
"""
Requests input with given prompt.
:param prompt: Text printed before the user's input without a newline.
:param highlight: A boolean indicating whether the line should be syntax colored.
"""
if "widget" in os.environ:
return None
if prompt is None:
prompt = ""
print(prompt, end="")
try:
path = threading.current_thread().script_path
except AttributeError:
path = ""
try:
__PyInputHelper__.showAlertWithPrompt(
prompt, script=threading.current_thread().script_path, highlight=highlight
)
except AttributeError:
__PyInputHelper__.showAlertWithPrompt(prompt, script=None, highlight=highlight)
userInput = __PyInputHelper__.waitForInput(path)
if userInput == "<WILL INTERRUPT>": # Will raise KeyboardInterrupt, don't return
while True:
time.sleep(0.2)
return str(userInput)
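# Example (hypothetical script):
#
#     name = input("What's your name? ")
#     print("Hello, " + name)
#
# The prompt is forwarded to the native console UI and the call blocks
# on PyInputHelper.waitForInput until the user submits a line.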
def print(*objects, sep: str = None, end: str = None):
"""
Prints to the Pyto console rather than to stdout. Works like the builtin `print` function but does not support printing to a custom file. Pyto catches stdout and stderr by default, so prefer the builtin function; this one is mainly for internal use.
"""
if sep is None:
sep = " "
if end is None:
end = "\n"
array = map(str, objects)
printed = sep.join(array) + end
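# Special case: a single str argument is forwarded unchanged, which
# also skips the sep/end processing above.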
try:
if objects[0].__class__ is str:
printed = objects[0]
except:
pass
try:
PyOutputHelper.print(printed, script=threading.current_thread().script_path)
except AttributeError:
PyOutputHelper.print(printed, script=None)
# MARK: - Alerts
if "widget" not in os.environ:
PyAlert = PyAlert
"""
A class representing an alert.
Example:

.. code-block:: python

alert = console.Alert.alertWithTitle("Hello", message="Hello World!")
alert.addAction("Ok")
alert.addCancelAction("Cancel")
if (alert.show() == "Ok"):
print("Good Bye!")
"""
class Alert:
"""
A wrapper of ``UIAlert``.
"""
pyAlert = None
def __init__(self):
self.pyAlert = PyAlert.alloc().init()
@staticmethod
def alertWithTitle(title: str, message: str) -> "Alert":
"""
Creates an alert.
:param title: The title of the alert.
:param message: The message of the alert.
"""
alert = Alert()
alert.pyAlert.title = title
alert.pyAlert.message = message
return alert
__actions__ = []
def addAction(self, title: str):
"""
Add an action with given title.
:param title: The title of the action.
"""
self.pyAlert.addAction(title)
def addDestructiveAction(self, title: str):
"""
Add a destructive action with given title.
:param title: The title of the action.
"""
self.pyAlert.addDestructiveAction(title)
def addCancelAction(self, title: str):
"""
Add a cancel action with given title. Can only be added once.
:param title: The title of the action.
"""
if not self.pyAlert.addCancelAction(title):
raise ValueError("There is already a cancel action.")
def show(self) -> str:
"""
Shows alert.
Returns the title of the selected action.
:rtype: str
"""
path = None
try:
path = threading.current_thread().script_path
except AttributeError:
pass
return self.pyAlert._show(path)
else:
PyAlert = None
Alert = None
__all__ = ["Alert", "clear", "print", "input"]
# trunk/swift/test/unit/common/test_db.py
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Tests for swift.common.db """
from __future__ import with_statement
import hashlib
import os
import unittest
from shutil import rmtree, copy
from StringIO import StringIO
from time import sleep, time
from uuid import uuid4
import simplejson
import sqlite3
import swift.common.db
from swift.common.db import AccountBroker, chexor, ContainerBroker, \
DatabaseBroker, DatabaseConnectionError, dict_factory, get_db_connection
from swift.common.utils import normalize_timestamp
from swift.common.exceptions import LockTimeout
class TestDatabaseConnectionError(unittest.TestCase):
def test_str(self):
err = \
DatabaseConnectionError(':memory:', 'No valid database connection')
self.assert_(':memory:' in str(err))
self.assert_('No valid database connection' in str(err))
err = DatabaseConnectionError(':memory:',
'No valid database connection', timeout=1357)
self.assert_(':memory:' in str(err))
self.assert_('No valid database connection' in str(err))
self.assert_('1357' in str(err))
class TestDictFactory(unittest.TestCase):
def test_normal_case(self):
conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE test (one TEXT, two INTEGER)')
conn.execute('INSERT INTO test (one, two) VALUES ("abc", 123)')
conn.execute('INSERT INTO test (one, two) VALUES ("def", 456)')
conn.commit()
curs = conn.execute('SELECT one, two FROM test')
self.assertEquals(dict_factory(curs, curs.next()),
{'one': 'abc', 'two': 123})
self.assertEquals(dict_factory(curs, curs.next()),
{'one': 'def', 'two': 456})
class TestChexor(unittest.TestCase):
def test_normal_case(self):
self.assertEquals(chexor('d41d8cd98f00b204e9800998ecf8427e',
'new name', normalize_timestamp(1)),
'4f2ea31ac14d4273fe32ba08062b21de')
def test_invalid_old_hash(self):
self.assertRaises(TypeError, chexor, 'oldhash', 'name',
normalize_timestamp(1))
def test_no_name(self):
self.assertRaises(Exception, chexor,
'd41d8cd98f00b204e9800998ecf8427e', None, normalize_timestamp(1))
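def _chexor_sketch(old_hash, name, timestamp):
    """
    Rough, hypothetical sketch of the chexor rolling hash, inferred from
    the tests above (the real implementation lives in swift.common.db):
    XOR the running hash with the MD5 of '<name>-<timestamp>'.
    """
    new_hash = hashlib.md5('%s-%s' % (name, timestamp)).hexdigest()
    return '%032x' % (int(old_hash, 16) ^ int(new_hash, 16))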
class TestGetDBConnection(unittest.TestCase):
def test_normal_case(self):
conn = get_db_connection(':memory:')
self.assert_(hasattr(conn, 'execute'))
def test_invalid_path(self):
self.assertRaises(DatabaseConnectionError, get_db_connection,
'invalid database path / name')
class TestDatabaseBroker(unittest.TestCase):
def setUp(self):
self.testdir = os.path.join(os.path.dirname(__file__), 'db')
rmtree(self.testdir, ignore_errors=1)
os.mkdir(self.testdir)
def tearDown(self):
rmtree(self.testdir, ignore_errors=1)
def test_memory_db_init(self):
broker = DatabaseBroker(':memory:')
self.assertEqual(broker.db_file, ':memory:')
self.assertRaises(AttributeError, broker.initialize,
normalize_timestamp('0'))
def test_disk_db_init(self):
db_file = os.path.join(self.testdir, '1.db')
broker = DatabaseBroker(db_file)
self.assertEqual(broker.db_file, db_file)
self.assert_(broker.conn is None)
def test_initialize(self):
self.assertRaises(AttributeError,
DatabaseBroker(':memory:').initialize,
normalize_timestamp('1'))
stub_dict = {}
def stub(*args, **kwargs):
for key in stub_dict.keys():
del stub_dict[key]
stub_dict['args'] = args
for key, value in kwargs.items():
stub_dict[key] = value
broker = DatabaseBroker(':memory:')
broker._initialize = stub
broker.initialize(normalize_timestamp('1'))
self.assert_(hasattr(stub_dict['args'][0], 'execute'))
self.assertEquals(stub_dict['args'][1], '0000000001.00000')
with broker.get() as conn:
conn.execute('SELECT * FROM outgoing_sync')
conn.execute('SELECT * FROM incoming_sync')
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
broker._initialize = stub
broker.initialize(normalize_timestamp('1'))
self.assert_(hasattr(stub_dict['args'][0], 'execute'))
self.assertEquals(stub_dict['args'][1], '0000000001.00000')
with broker.get() as conn:
conn.execute('SELECT * FROM outgoing_sync')
conn.execute('SELECT * FROM incoming_sync')
def test_delete_db(self):
def init_stub(conn, put_timestamp):
conn.execute('CREATE TABLE test (one TEXT)')
conn.execute('CREATE TABLE test_stat (id TEXT)')
conn.execute('INSERT INTO test_stat (id) VALUES (?)',
(str(uuid4),))
conn.execute('INSERT INTO test (one) VALUES ("1")')
conn.commit()
stub_called = [False]
def delete_stub(*a, **kw):
stub_called[0] = True
broker = DatabaseBroker(':memory:')
broker.db_type = 'test'
broker._initialize = init_stub
# Initializes a good broker for us
broker.initialize(normalize_timestamp('1'))
self.assert_(broker.conn is not None)
broker._delete_db = delete_stub
stub_called[0] = False
broker.delete_db('2')
self.assert_(stub_called[0])
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
broker.db_type = 'test'
broker._initialize = init_stub
broker.initialize(normalize_timestamp('1'))
broker._delete_db = delete_stub
stub_called[0] = False
broker.delete_db('2')
self.assert_(stub_called[0])
# ensure that metadata was cleared
m2 = broker.metadata
self.assert_(not any(v[0] for v in m2.itervalues()))
self.assert_(all(v[1] == normalize_timestamp('2')
for v in m2.itervalues()))
def test_get(self):
broker = DatabaseBroker(':memory:')
got_exc = False
try:
with broker.get() as conn:
conn.execute('SELECT 1')
except Exception:
got_exc = True
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
got_exc = False
try:
with broker.get() as conn:
conn.execute('SELECT 1')
except Exception:
got_exc = True
self.assert_(got_exc)
def stub(*args, **kwargs):
pass
broker._initialize = stub
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
conn.execute('CREATE TABLE test (one TEXT)')
try:
with broker.get() as conn:
conn.execute('INSERT INTO test (one) VALUES ("1")')
raise Exception('test')
conn.commit()
except Exception:
pass
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
with broker.get() as conn:
self.assertEquals(
[r[0] for r in conn.execute('SELECT * FROM test')], [])
with broker.get() as conn:
conn.execute('INSERT INTO test (one) VALUES ("1")')
conn.commit()
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
with broker.get() as conn:
self.assertEquals(
[r[0] for r in conn.execute('SELECT * FROM test')], ['1'])
orig_renamer = swift.common.db.renamer
try:
swift.common.db.renamer = lambda a, b: b
qpath = os.path.dirname(os.path.dirname(os.path.dirname(
os.path.dirname(self.testdir))))
if qpath:
qpath += '/quarantined/tests/db'
else:
qpath = 'quarantined/tests/db'
# Test malformed database
copy(os.path.join(os.path.dirname(__file__),
'malformed_example.db'),
os.path.join(self.testdir, '1.db'))
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
broker.db_type = 'test'
exc = None
try:
with broker.get() as conn:
conn.execute('SELECT * FROM test')
except Exception, err:
exc = err
self.assertEquals(str(exc),
'Quarantined %s to %s due to malformed database' %
(self.testdir, qpath))
# Test corrupted database
copy(os.path.join(os.path.dirname(__file__),
'corrupted_example.db'),
os.path.join(self.testdir, '1.db'))
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'))
broker.db_type = 'test'
exc = None
try:
with broker.get() as conn:
conn.execute('SELECT * FROM test')
except Exception, err:
exc = err
self.assertEquals(str(exc),
'Quarantined %s to %s due to corrupted database' %
(self.testdir, qpath))
finally:
swift.common.db.renamer = orig_renamer
def test_lock(self):
broker = DatabaseBroker(os.path.join(self.testdir, '1.db'), timeout=.1)
got_exc = False
try:
with broker.lock():
pass
except Exception:
got_exc = True
self.assert_(got_exc)
def stub(*args, **kwargs):
pass
broker._initialize = stub
broker.initialize(normalize_timestamp('1'))
with broker.lock():
pass
with broker.lock():
pass
broker2 = DatabaseBroker(os.path.join(self.testdir, '1.db'), timeout=.1)
broker2._initialize = stub
with broker.lock():
got_exc = False
try:
with broker2.lock():
pass
except LockTimeout:
got_exc = True
self.assert_(got_exc)
try:
with broker.lock():
raise Exception('test')
except Exception:
pass
with broker.lock():
pass
def test_newid(self):
broker = DatabaseBroker(':memory:')
broker.db_type = 'test'
broker.db_contains_type = 'test'
uuid1 = str(uuid4())
def _initialize(conn, timestamp):
conn.execute('CREATE TABLE test (one TEXT)')
conn.execute('CREATE TABLE test_stat (id TEXT)')
conn.execute('INSERT INTO test_stat (id) VALUES (?)', (uuid1,))
conn.commit()
broker._initialize = _initialize
broker.initialize(normalize_timestamp('1'))
uuid2 = str(uuid4())
broker.newid(uuid2)
with broker.get() as conn:
uuids = [r[0] for r in conn.execute('SELECT * FROM test_stat')]
self.assertEquals(len(uuids), 1)
self.assertNotEquals(uuids[0], uuid1)
uuid1 = uuids[0]
points = [(r[0], r[1]) for r in conn.execute('SELECT sync_point, '
'remote_id FROM incoming_sync WHERE remote_id = ?', (uuid2,))]
self.assertEquals(len(points), 1)
self.assertEquals(points[0][0], -1)
self.assertEquals(points[0][1], uuid2)
conn.execute('INSERT INTO test (one) VALUES ("1")')
conn.commit()
uuid3 = str(uuid4())
broker.newid(uuid3)
with broker.get() as conn:
uuids = [r[0] for r in conn.execute('SELECT * FROM test_stat')]
self.assertEquals(len(uuids), 1)
self.assertNotEquals(uuids[0], uuid1)
uuid1 = uuids[0]
points = [(r[0], r[1]) for r in conn.execute('SELECT sync_point, '
'remote_id FROM incoming_sync WHERE remote_id = ?', (uuid3,))]
self.assertEquals(len(points), 1)
self.assertEquals(points[0][1], uuid3)
broker.newid(uuid2)
with broker.get() as conn:
uuids = [r[0] for r in conn.execute('SELECT * FROM test_stat')]
self.assertEquals(len(uuids), 1)
self.assertNotEquals(uuids[0], uuid1)
points = [(r[0], r[1]) for r in conn.execute('SELECT sync_point, '
'remote_id FROM incoming_sync WHERE remote_id = ?', (uuid2,))]
self.assertEquals(len(points), 1)
self.assertEquals(points[0][1], uuid2)
def test_get_items_since(self):
broker = DatabaseBroker(':memory:')
broker.db_type = 'test'
broker.db_contains_type = 'test'
def _initialize(conn, timestamp):
conn.execute('CREATE TABLE test (one TEXT)')
conn.execute('INSERT INTO test (one) VALUES ("1")')
conn.execute('INSERT INTO test (one) VALUES ("2")')
conn.execute('INSERT INTO test (one) VALUES ("3")')
conn.commit()
broker._initialize = _initialize
broker.initialize(normalize_timestamp('1'))
self.assertEquals(broker.get_items_since(-1, 10),
[{'one': '1'}, {'one': '2'}, {'one': '3'}])
self.assertEquals(broker.get_items_since(-1, 2),
[{'one': '1'}, {'one': '2'}])
self.assertEquals(broker.get_items_since(1, 2),
[{'one': '2'}, {'one': '3'}])
self.assertEquals(broker.get_items_since(3, 2), [])
self.assertEquals(broker.get_items_since(999, 2), [])
def test_get_sync(self):
broker = DatabaseBroker(':memory:')
broker.db_type = 'test'
broker.db_contains_type = 'test'
uuid1 = str(uuid4())
def _initialize(conn, timestamp):
conn.execute('CREATE TABLE test (one TEXT)')
conn.execute('CREATE TABLE test_stat (id TEXT)')
conn.execute('INSERT INTO test_stat (id) VALUES (?)', (uuid1,))
conn.execute('INSERT INTO test (one) VALUES ("1")')
conn.commit()
broker._initialize = _initialize
broker.initialize(normalize_timestamp('1'))
uuid2 = str(uuid4())
self.assertEquals(broker.get_sync(uuid2), -1)
broker.newid(uuid2)
self.assertEquals(broker.get_sync(uuid2), 1)
uuid3 = str(uuid4())
self.assertEquals(broker.get_sync(uuid3), -1)
with broker.get() as conn:
conn.execute('INSERT INTO test (one) VALUES ("2")')
conn.commit()
broker.newid(uuid3)
self.assertEquals(broker.get_sync(uuid2), 1)
self.assertEquals(broker.get_sync(uuid3), 2)
self.assertEquals(broker.get_sync(uuid2, incoming=False), -1)
self.assertEquals(broker.get_sync(uuid3, incoming=False), -1)
broker.merge_syncs([{'sync_point': 1, 'remote_id': uuid2}],
incoming=False)
self.assertEquals(broker.get_sync(uuid2), 1)
self.assertEquals(broker.get_sync(uuid3), 2)
self.assertEquals(broker.get_sync(uuid2, incoming=False), 1)
self.assertEquals(broker.get_sync(uuid3, incoming=False), -1)
broker.merge_syncs([{'sync_point': 2, 'remote_id': uuid3}],
incoming=False)
self.assertEquals(broker.get_sync(uuid2, incoming=False), 1)
self.assertEquals(broker.get_sync(uuid3, incoming=False), 2)
def test_merge_syncs(self):
broker = DatabaseBroker(':memory:')
def stub(*args, **kwargs):
pass
broker._initialize = stub
broker.initialize(normalize_timestamp('1'))
uuid2 = str(uuid4())
broker.merge_syncs([{'sync_point': 1, 'remote_id': uuid2}])
self.assertEquals(broker.get_sync(uuid2), 1)
uuid3 = str(uuid4())
broker.merge_syncs([{'sync_point': 2, 'remote_id': uuid3}])
self.assertEquals(broker.get_sync(uuid2), 1)
self.assertEquals(broker.get_sync(uuid3), 2)
self.assertEquals(broker.get_sync(uuid2, incoming=False), -1)
self.assertEquals(broker.get_sync(uuid3, incoming=False), -1)
broker.merge_syncs([{'sync_point': 3, 'remote_id': uuid2},
{'sync_point': 4, 'remote_id': uuid3}],
incoming=False)
self.assertEquals(broker.get_sync(uuid2, incoming=False), 3)
self.assertEquals(broker.get_sync(uuid3, incoming=False), 4)
self.assertEquals(broker.get_sync(uuid2), 1)
self.assertEquals(broker.get_sync(uuid3), 2)
broker.merge_syncs([{'sync_point': 5, 'remote_id': uuid2}])
self.assertEquals(broker.get_sync(uuid2), 5)
def test_get_replication_info(self):
self.get_replication_info_tester(metadata=False)
def test_get_replication_info_with_metadata(self):
self.get_replication_info_tester(metadata=True)
def get_replication_info_tester(self, metadata=False):
broker = DatabaseBroker(':memory:', account='a')
broker.db_type = 'test'
broker.db_contains_type = 'test'
broker_creation = normalize_timestamp(1)
broker_uuid = str(uuid4())
broker_metadata = metadata and simplejson.dumps(
{'Test': ('Value', normalize_timestamp(1))}) or ''
def _initialize(conn, put_timestamp):
if put_timestamp is None:
put_timestamp = normalize_timestamp(0)
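# Stand-in schema: the triggers keep test_count and the rolling chexor
# hash in test_stat consistent with every INSERT/DELETE, mirroring how
# the real account/container schemas maintain their stats rows.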
conn.executescript('''
CREATE TABLE test (
ROWID INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT UNIQUE,
created_at TEXT
);
CREATE TRIGGER test_insert AFTER INSERT ON test
BEGIN
UPDATE test_stat
SET test_count = test_count + 1,
hash = chexor(hash, new.name, new.created_at);
END;
CREATE TRIGGER test_update BEFORE UPDATE ON test
BEGIN
SELECT RAISE(FAIL,
'UPDATE not allowed; DELETE and INSERT');
END;
CREATE TRIGGER test_delete AFTER DELETE ON test
BEGIN
UPDATE test_stat
SET test_count = test_count - 1,
hash = chexor(hash, old.name, old.created_at);
END;
CREATE TABLE test_stat (
account TEXT,
created_at TEXT,
put_timestamp TEXT DEFAULT '0',
delete_timestamp TEXT DEFAULT '0',
test_count INTEGER,
hash TEXT default '00000000000000000000000000000000',
id TEXT
%s
);
INSERT INTO test_stat (test_count) VALUES (0);
''' % (metadata and ", metadata TEXT DEFAULT ''" or ""))
conn.execute('''
UPDATE test_stat
SET account = ?, created_at = ?, id = ?, put_timestamp = ?
''', (broker.account, broker_creation, broker_uuid, put_timestamp))
if metadata:
conn.execute('UPDATE test_stat SET metadata = ?',
(broker_metadata,))
conn.commit()
broker._initialize = _initialize
put_timestamp = normalize_timestamp(2)
broker.initialize(put_timestamp)
info = broker.get_replication_info()
self.assertEquals(info, {'count': 0,
'hash': '00000000000000000000000000000000',
'created_at': broker_creation, 'put_timestamp': put_timestamp,
'delete_timestamp': '0', 'max_row': -1, 'id': broker_uuid,
'metadata': broker_metadata})
insert_timestamp = normalize_timestamp(3)
with broker.get() as conn:
conn.execute('''
INSERT INTO test (name, created_at) VALUES ('test', ?)
''', (insert_timestamp,))
conn.commit()
info = broker.get_replication_info()
self.assertEquals(info, {'count': 1,
'hash': 'bdc4c93f574b0d8c2911a27ce9dd38ba',
'created_at': broker_creation, 'put_timestamp': put_timestamp,
'delete_timestamp': '0', 'max_row': 1, 'id': broker_uuid,
'metadata': broker_metadata})
with broker.get() as conn:
conn.execute('DELETE FROM test')
conn.commit()
info = broker.get_replication_info()
self.assertEquals(info, {'count': 0,
'hash': '00000000000000000000000000000000',
'created_at': broker_creation, 'put_timestamp': put_timestamp,
'delete_timestamp': '0', 'max_row': 1, 'id': broker_uuid,
'metadata': broker_metadata})
return broker
def test_metadata(self):
# Initializes a good broker for us
broker = self.get_replication_info_tester(metadata=True)
# Add our first item
first_timestamp = normalize_timestamp(1)
first_value = '1'
broker.update_metadata({'First': [first_value, first_timestamp]})
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
# Add our second item
second_timestamp = normalize_timestamp(2)
second_value = '2'
broker.update_metadata({'Second': [second_value, second_timestamp]})
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
self.assert_('Second' in broker.metadata)
self.assertEquals(broker.metadata['Second'],
[second_value, second_timestamp])
# Update our first item
first_timestamp = normalize_timestamp(3)
first_value = '1b'
broker.update_metadata({'First': [first_value, first_timestamp]})
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
self.assert_('Second' in broker.metadata)
self.assertEquals(broker.metadata['Second'],
[second_value, second_timestamp])
# Delete our second item (by setting to empty string)
second_timestamp = normalize_timestamp(4)
second_value = ''
broker.update_metadata({'Second': [second_value, second_timestamp]})
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
self.assert_('Second' in broker.metadata)
self.assertEquals(broker.metadata['Second'],
[second_value, second_timestamp])
# Reclaim at point before second item was deleted
broker.reclaim(normalize_timestamp(3))
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
self.assert_('Second' in broker.metadata)
self.assertEquals(broker.metadata['Second'],
[second_value, second_timestamp])
# Reclaim at point second item was deleted
broker.reclaim(normalize_timestamp(4))
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
self.assert_('Second' in broker.metadata)
self.assertEquals(broker.metadata['Second'],
[second_value, second_timestamp])
# Reclaim after point second item was deleted
broker.reclaim(normalize_timestamp(5))
self.assert_('First' in broker.metadata)
self.assertEquals(broker.metadata['First'],
[first_value, first_timestamp])
self.assert_('Second' not in broker.metadata)
class TestContainerBroker(unittest.TestCase):
""" Tests for swift.common.db.ContainerBroker """
def test_creation(self):
""" Test swift.common.db.ContainerBroker.__init__ """
broker = ContainerBroker(':memory:', account='a', container='c')
self.assertEqual(broker.db_file, ':memory:')
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
curs = conn.cursor()
curs.execute('SELECT 1')
self.assertEqual(curs.fetchall()[0][0], 1)
def test_exception(self):
""" Test swift.common.db.ContainerBroker throwing a conn away after
unhandled exception """
first_conn = None
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
first_conn = conn
try:
with broker.get() as conn:
self.assertEquals(first_conn, conn)
raise Exception('OMG')
except Exception:
pass
self.assert_(broker.conn is None)
def test_empty(self):
""" Test swift.common.db.ContainerBroker.empty """
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
self.assert_(broker.empty())
broker.put_object('o', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
self.assert_(not broker.empty())
sleep(.00001)
broker.delete_object('o', normalize_timestamp(time()))
self.assert_(broker.empty())
def test_reclaim(self):
broker = ContainerBroker(':memory:', account='test_account',
container='test_container')
broker.initialize(normalize_timestamp('1'))
broker.put_object('o', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 1)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 0)
broker.reclaim(normalize_timestamp(time() - 999), time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 1)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 0)
sleep(.00001)
broker.delete_object('o', normalize_timestamp(time()))
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 1)
broker.reclaim(normalize_timestamp(time() - 999), time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 1)
sleep(.00001)
broker.reclaim(normalize_timestamp(time()), time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 0)
# Test the return values of reclaim()
broker.put_object('w', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('x', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('y', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('z', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
# Test before deletion
res = broker.reclaim(normalize_timestamp(time()), time())
broker.delete_db(normalize_timestamp(time()))
def test_delete_object(self):
""" Test swift.common.db.ContainerBroker.delete_object """
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
broker.put_object('o', normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 1)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 0)
sleep(.00001)
broker.delete_object('o', normalize_timestamp(time()))
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 1)
def test_put_object(self):
""" Test swift.common.db.ContainerBroker.put_object """
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
# Create initial object
timestamp = normalize_timestamp(time())
broker.put_object('"{<object \'&\' name>}"', timestamp, 123,
'application/x-test',
'5af83e3196bf99f440f31f2e1a6c9afe')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 123)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'5af83e3196bf99f440f31f2e1a6c9afe')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# Reput same event
broker.put_object('"{<object \'&\' name>}"', timestamp, 123,
'application/x-test',
'5af83e3196bf99f440f31f2e1a6c9afe')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 123)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'5af83e3196bf99f440f31f2e1a6c9afe')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# Put new event
sleep(.00001)
timestamp = normalize_timestamp(time())
broker.put_object('"{<object \'&\' name>}"', timestamp, 124,
'application/x-test',
'aa0749bacbc79ec65fe206943d8fe449')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 124)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'aa0749bacbc79ec65fe206943d8fe449')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# Put old event
otimestamp = normalize_timestamp(float(timestamp) - 1)
broker.put_object('"{<object \'&\' name>}"', otimestamp, 124,
'application/x-test',
'aa0749bacbc79ec65fe206943d8fe449')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 124)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'aa0749bacbc79ec65fe206943d8fe449')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# Put old delete event
dtimestamp = normalize_timestamp(float(timestamp) - 1)
broker.put_object('"{<object \'&\' name>}"', dtimestamp, 0, '', '',
deleted=1)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 124)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'aa0749bacbc79ec65fe206943d8fe449')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# Put new delete event
sleep(.00001)
timestamp = normalize_timestamp(time())
broker.put_object('"{<object \'&\' name>}"', timestamp, 0, '', '',
deleted=1)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 1)
# Put new event
sleep(.00001)
timestamp = normalize_timestamp(time())
broker.put_object('"{<object \'&\' name>}"', timestamp, 123,
'application/x-test',
'5af83e3196bf99f440f31f2e1a6c9afe')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 123)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'5af83e3196bf99f440f31f2e1a6c9afe')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# We'll use this later
sleep(.0001)
in_between_timestamp = normalize_timestamp(time())
# New post event
sleep(.0001)
previous_timestamp = timestamp
timestamp = normalize_timestamp(time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0],
previous_timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 123)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'5af83e3196bf99f440f31f2e1a6c9afe')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
# Put event from after last put but before last post
timestamp = in_between_timestamp
broker.put_object('"{<object \'&\' name>}"', timestamp, 456,
'application/x-test3',
'6af83e3196bf99f440f31f2e1a6c9afe')
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0],
'"{<object \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT created_at FROM object").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT size FROM object").fetchone()[0], 456)
self.assertEquals(conn.execute(
"SELECT content_type FROM object").fetchone()[0],
'application/x-test3')
self.assertEquals(conn.execute(
"SELECT etag FROM object").fetchone()[0],
'6af83e3196bf99f440f31f2e1a6c9afe')
self.assertEquals(conn.execute(
"SELECT deleted FROM object").fetchone()[0], 0)
def test_get_info(self):
""" Test swift.common.db.ContainerBroker.get_info """
broker = ContainerBroker(':memory:', account='test1', container='test2')
broker.initialize(normalize_timestamp('1'))
info = broker.get_info()
self.assertEquals(info['account'], 'test1')
self.assertEquals(info['container'], 'test2')
self.assertEquals(info['hash'], '00000000000000000000000000000000')
info = broker.get_info()
self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0)
broker.put_object('o1', normalize_timestamp(time()), 123, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 123)
sleep(.00001)
broker.put_object('o2', normalize_timestamp(time()), 123, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info()
self.assertEquals(info['object_count'], 2)
self.assertEquals(info['bytes_used'], 246)
sleep(.00001)
broker.put_object('o2', normalize_timestamp(time()), 1000,
'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info()
self.assertEquals(info['object_count'], 2)
self.assertEquals(info['bytes_used'], 1123)
sleep(.00001)
broker.delete_object('o1', normalize_timestamp(time()))
info = broker.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 1000)
sleep(.00001)
broker.delete_object('o2', normalize_timestamp(time()))
info = broker.get_info()
self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0)
info = broker.get_info()
self.assertEquals(info['x_container_sync_point1'], -1)
self.assertEquals(info['x_container_sync_point2'], -1)
def test_set_x_syncs(self):
broker = ContainerBroker(':memory:', account='test1', container='test2')
broker.initialize(normalize_timestamp('1'))
info = broker.get_info()
self.assertEquals(info['x_container_sync_point1'], -1)
self.assertEquals(info['x_container_sync_point2'], -1)
broker.set_x_container_sync_points(1, 2)
info = broker.get_info()
self.assertEquals(info['x_container_sync_point1'], 1)
self.assertEquals(info['x_container_sync_point2'], 2)
def test_get_report_info(self):
broker = ContainerBroker(':memory:', account='test1', container='test2')
broker.initialize(normalize_timestamp('1'))
info = broker.get_info()
self.assertEquals(info['account'], 'test1')
self.assertEquals(info['container'], 'test2')
self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0)
self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0)
broker.put_object('o1', normalize_timestamp(time()), 123, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 123)
self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0)
sleep(.00001)
broker.put_object('o2', normalize_timestamp(time()), 123, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info()
self.assertEquals(info['object_count'], 2)
self.assertEquals(info['bytes_used'], 246)
self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0)
sleep(.00001)
broker.put_object('o2', normalize_timestamp(time()), 1000,
'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info()
self.assertEquals(info['object_count'], 2)
self.assertEquals(info['bytes_used'], 1123)
self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0)
put_timestamp = normalize_timestamp(time())
sleep(.001)
delete_timestamp = normalize_timestamp(time())
broker.reported(put_timestamp, delete_timestamp, 2, 1123)
info = broker.get_info()
self.assertEquals(info['object_count'], 2)
self.assertEquals(info['bytes_used'], 1123)
self.assertEquals(info['reported_put_timestamp'], put_timestamp)
self.assertEquals(info['reported_delete_timestamp'], delete_timestamp)
self.assertEquals(info['reported_object_count'], 2)
self.assertEquals(info['reported_bytes_used'], 1123)
sleep(.00001)
broker.delete_object('o1', normalize_timestamp(time()))
info = broker.get_info()
self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 1000)
self.assertEquals(info['reported_object_count'], 2)
self.assertEquals(info['reported_bytes_used'], 1123)
sleep(.00001)
broker.delete_object('o2', normalize_timestamp(time()))
info = broker.get_info()
self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0)
self.assertEquals(info['reported_object_count'], 2)
self.assertEquals(info['reported_bytes_used'], 1123)
def test_list_objects_iter(self):
""" Test swift.common.db.ContainerBroker.list_objects_iter """
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
for obj1 in xrange(4):
for obj2 in xrange(125):
broker.put_object('%d/%04d' % (obj1, obj2),
normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
for obj in xrange(125):
broker.put_object('2/0051/%04d' % obj,
normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
for obj in xrange(125):
broker.put_object('3/%04d/0049' % obj,
normalize_timestamp(time()), 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(100, '', None, None, '')
self.assertEquals(len(listing), 100)
self.assertEquals(listing[0][0], '0/0000')
self.assertEquals(listing[-1][0], '0/0099')
listing = broker.list_objects_iter(100, '', '0/0050', None, '')
self.assertEquals(len(listing), 50)
self.assertEquals(listing[0][0], '0/0000')
self.assertEquals(listing[-1][0], '0/0049')
listing = broker.list_objects_iter(100, '0/0099', None, None, '')
self.assertEquals(len(listing), 100)
self.assertEquals(listing[0][0], '0/0100')
self.assertEquals(listing[-1][0], '1/0074')
listing = broker.list_objects_iter(55, '1/0074', None, None, '')
self.assertEquals(len(listing), 55)
self.assertEquals(listing[0][0], '1/0075')
self.assertEquals(listing[-1][0], '2/0004')
listing = broker.list_objects_iter(10, '', None, '0/01', '')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '0/0100')
self.assertEquals(listing[-1][0], '0/0109')
listing = broker.list_objects_iter(10, '', None, '0/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '0/0000')
self.assertEquals(listing[-1][0], '0/0009')
listing = broker.list_objects_iter(10, '', None, '', '/')
self.assertEquals(len(listing), 4)
self.assertEquals([row[0] for row in listing],
['0/', '1/', '2/', '3/'])
listing = broker.list_objects_iter(10, '2', None, None, '/')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing], ['2/', '3/'])
listing = broker.list_objects_iter(10, '2/', None, None, '/')
self.assertEquals(len(listing), 1)
self.assertEquals([row[0] for row in listing], ['3/'])
listing = broker.list_objects_iter(10, '2/0050', None, '2/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '2/0051')
self.assertEquals(listing[1][0], '2/0051/')
self.assertEquals(listing[2][0], '2/0052')
self.assertEquals(listing[-1][0], '2/0059')
listing = broker.list_objects_iter(10, '3/0045', None, '3/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['3/0045/', '3/0046', '3/0046/', '3/0047',
'3/0047/', '3/0048', '3/0048/', '3/0049',
'3/0049/', '3/0050'])
broker.put_object('3/0049/', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(10, '3/0048', None, None, None)
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['3/0048/0049', '3/0049', '3/0049/',
'3/0049/0049', '3/0050', '3/0050/0049', '3/0051', '3/0051/0049',
'3/0052', '3/0052/0049'])
listing = broker.list_objects_iter(10, '3/0048', None, '3/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['3/0048/', '3/0049', '3/0049/', '3/0050',
'3/0050/', '3/0051', '3/0051/', '3/0052', '3/0052/', '3/0053'])
listing = broker.list_objects_iter(10, None, None, '3/0049/', '/')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing],
['3/0049/', '3/0049/0049'])
listing = broker.list_objects_iter(10, None, None, None, None,
'3/0049')
self.assertEquals(len(listing), 1)
self.assertEquals([row[0] for row in listing], ['3/0049/0049'])
listing = broker.list_objects_iter(2, None, None, '3/', '/')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing], ['3/0000', '3/0000/'])
listing = broker.list_objects_iter(2, None, None, None, None, '3')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing], ['3/0000', '3/0001'])
def test_list_objects_iter_prefix_delim(self):
""" Test swift.common.db.ContainerBroker.list_objects_iter """
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
broker.put_object('/pets/dogs/1', normalize_timestamp(0), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('/pets/dogs/2', normalize_timestamp(0), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('/pets/fish/a', normalize_timestamp(0), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('/pets/fish/b', normalize_timestamp(0), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('/pets/fish_info.txt', normalize_timestamp(0), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('/snakes', normalize_timestamp(0), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
#def list_objects_iter(self, limit, marker, prefix, delimiter, path=None,
# format=None):
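# With a delimiter, names that continue past it are rolled up into a
# single 'subdir' entry ending in the delimiter, which is why
# '/pets/fish/' is listed instead of the individual fish objects.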
listing = broker.list_objects_iter(100, None, None, '/pets/f', '/')
self.assertEquals([row[0] for row in listing], ['/pets/fish/', '/pets/fish_info.txt'])
listing = broker.list_objects_iter(100, None, None, '/pets/fish', '/')
self.assertEquals([row[0] for row in listing], ['/pets/fish/', '/pets/fish_info.txt'])
listing = broker.list_objects_iter(100, None, None, '/pets/fish/', '/')
self.assertEquals([row[0] for row in listing], ['/pets/fish/a', '/pets/fish/b'])
def test_double_check_trailing_delimiter(self):
""" Test swift.common.db.ContainerBroker.list_objects_iter for a
container that has an odd file with a trailing delimiter """
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
broker.put_object('a', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/a', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/a/a', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/a/b', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/b', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b/a', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b/b', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('c', normalize_timestamp(time()), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(15, None, None, None, None)
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['a', 'a/', 'a/a', 'a/a/a', 'a/a/b', 'a/b', 'b', 'b/a', 'b/b', 'c'])
listing = broker.list_objects_iter(15, None, None, '', '/')
self.assertEquals(len(listing), 5)
self.assertEquals([row[0] for row in listing],
['a', 'a/', 'b', 'b/', 'c'])
listing = broker.list_objects_iter(15, None, None, 'a/', '/')
self.assertEquals(len(listing), 4)
self.assertEquals([row[0] for row in listing],
['a/', 'a/a', 'a/a/', 'a/b'])
listing = broker.list_objects_iter(15, None, None, 'b/', '/')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing], ['b/a', 'b/b'])
def test_chexor(self):
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
broker.put_object('a', normalize_timestamp(1), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b', normalize_timestamp(2), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
hasha = hashlib.md5('%s-%s' % ('a', '0000000001.00000')).digest()
hashb = hashlib.md5('%s-%s' % ('b', '0000000002.00000')).digest()
hashc = ''.join(('%2x' % (ord(a)^ord(b)) for a, b in zip(hasha, hashb)))
self.assertEquals(broker.get_info()['hash'], hashc)
broker.put_object('b', normalize_timestamp(3), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
hashb = hashlib.md5('%s-%s' % ('b', '0000000003.00000')).digest()
hashc = ''.join(('%02x' % (ord(a)^ord(b)) for a, b in zip(hasha, hashb)))
self.assertEquals(broker.get_info()['hash'], hashc)
def test_newid(self):
"""test DatabaseBroker.newid"""
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
id = broker.get_info()['id']
broker.newid('someid')
self.assertNotEquals(id, broker.get_info()['id'])
def test_get_items_since(self):
"""test DatabaseBroker.get_items_since"""
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
broker.put_object('a', normalize_timestamp(1), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
max_row = broker.get_replication_info()['max_row']
broker.put_object('b', normalize_timestamp(2), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
items = broker.get_items_since(max_row, 1000)
self.assertEquals(len(items), 1)
self.assertEquals(items[0]['name'], 'b')
def test_sync_merging(self):
""" exercise the DatabaseBroker sync functions a bit """
broker1 = ContainerBroker(':memory:', account='a', container='c')
broker1.initialize(normalize_timestamp('1'))
broker2 = ContainerBroker(':memory:', account='a', container='c')
broker2.initialize(normalize_timestamp('1'))
self.assertEquals(broker2.get_sync('12345'), -1)
broker1.merge_syncs([{'sync_point': 3, 'remote_id': '12345'}])
broker2.merge_syncs(broker1.get_syncs())
self.assertEquals(broker2.get_sync('12345'), 3)
def test_merge_items(self):
broker1 = ContainerBroker(':memory:', account='a', container='c')
broker1.initialize(normalize_timestamp('1'))
broker2 = ContainerBroker(':memory:', account='a', container='c')
broker2.initialize(normalize_timestamp('1'))
broker1.put_object('a', normalize_timestamp(1), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker1.put_object('b', normalize_timestamp(2), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
id = broker1.get_info()['id']
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(len(items), 2)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
broker1.put_object('c', normalize_timestamp(3), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(len(items), 3)
self.assertEquals(['a', 'b', 'c'],
sorted([rec['name'] for rec in items]))
def test_merge_items_overwrite(self):
"""test DatabaseBroker.merge_items"""
broker1 = ContainerBroker(':memory:', account='a', container='c')
broker1.initialize(normalize_timestamp('1'))
id = broker1.get_info()['id']
broker2 = ContainerBroker(':memory:', account='a', container='c')
broker2.initialize(normalize_timestamp('1'))
broker1.put_object('a', normalize_timestamp(2), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker1.put_object('b', normalize_timestamp(3), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
broker1.put_object('a', normalize_timestamp(4), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
for rec in items:
if rec['name'] == 'a':
self.assertEquals(rec['created_at'], normalize_timestamp(4))
if rec['name'] == 'b':
self.assertEquals(rec['created_at'], normalize_timestamp(3))
def test_merge_items_post_overwrite_out_of_order(self):
"""test DatabaseBroker.merge_items"""
broker1 = ContainerBroker(':memory:', account='a', container='c')
broker1.initialize(normalize_timestamp('1'))
id = broker1.get_info()['id']
broker2 = ContainerBroker(':memory:', account='a', container='c')
broker2.initialize(normalize_timestamp('1'))
broker1.put_object('a', normalize_timestamp(2), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker1.put_object('b', normalize_timestamp(3), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
broker1.put_object('a', normalize_timestamp(4), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
for rec in items:
if rec['name'] == 'a':
self.assertEquals(rec['created_at'], normalize_timestamp(4))
if rec['name'] == 'b':
self.assertEquals(rec['created_at'], normalize_timestamp(3))
self.assertEquals(rec['content_type'], 'text/plain')
items = broker2.get_items_since(-1, 1000)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
for rec in items:
if rec['name'] == 'a':
self.assertEquals(rec['created_at'], normalize_timestamp(4))
if rec['name'] == 'b':
self.assertEquals(rec['created_at'], normalize_timestamp(3))
broker1.put_object('b', normalize_timestamp(5), 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
for rec in items:
if rec['name'] == 'a':
self.assertEquals(rec['created_at'], normalize_timestamp(4))
if rec['name'] == 'b':
self.assertEquals(rec['created_at'], normalize_timestamp(5))
self.assertEquals(rec['content_type'], 'text/plain')
def premetadata_create_container_stat_table(self, conn, put_timestamp=None):
"""
Copied from swift.common.db.ContainerBroker before the metadata column was
added; used for testing with TestContainerBrokerBeforeMetadata.
Create the container_stat table which is specific to the container DB.
:param conn: DB connection object
:param put_timestamp: put timestamp
"""
if put_timestamp is None:
put_timestamp = normalize_timestamp(0)
conn.executescript("""
CREATE TABLE container_stat (
account TEXT,
container TEXT,
created_at TEXT,
put_timestamp TEXT DEFAULT '0',
delete_timestamp TEXT DEFAULT '0',
object_count INTEGER,
bytes_used INTEGER,
reported_put_timestamp TEXT DEFAULT '0',
reported_delete_timestamp TEXT DEFAULT '0',
reported_object_count INTEGER DEFAULT 0,
reported_bytes_used INTEGER DEFAULT 0,
hash TEXT default '00000000000000000000000000000000',
id TEXT,
status TEXT DEFAULT '',
status_changed_at TEXT DEFAULT '0'
);
INSERT INTO container_stat (object_count, bytes_used)
VALUES (0, 0);
""")
conn.execute('''
UPDATE container_stat
SET account = ?, container = ?, created_at = ?, id = ?,
put_timestamp = ?
''', (self.account, self.container, normalize_timestamp(time()),
str(uuid4()), put_timestamp))
class TestContainerBrokerBeforeMetadata(TestContainerBroker):
"""
Tests for swift.common.db.ContainerBroker against databases created before
the metadata column was added.
"""
def setUp(self):
self._imported_create_container_stat_table = \
ContainerBroker.create_container_stat_table
ContainerBroker.create_container_stat_table = \
premetadata_create_container_stat_table
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
exc = None
with broker.get() as conn:
try:
conn.execute('SELECT metadata FROM container_stat')
except BaseException, err:
exc = err
self.assert_('no such column: metadata' in str(exc))
def tearDown(self):
ContainerBroker.create_container_stat_table = \
self._imported_create_container_stat_table
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
conn.execute('SELECT metadata FROM container_stat')
def prexsync_create_container_stat_table(self, conn, put_timestamp=None):
"""
Copied from swift.common.db.ContainerBroker before the
x_container_sync_point[12] columns were added; used for testing with
TestContainerBrokerBeforeXSync.
Create the container_stat table which is specific to the container DB.
:param conn: DB connection object
:param put_timestamp: put timestamp
"""
if put_timestamp is None:
put_timestamp = normalize_timestamp(0)
conn.executescript("""
CREATE TABLE container_stat (
account TEXT,
container TEXT,
created_at TEXT,
put_timestamp TEXT DEFAULT '0',
delete_timestamp TEXT DEFAULT '0',
object_count INTEGER,
bytes_used INTEGER,
reported_put_timestamp TEXT DEFAULT '0',
reported_delete_timestamp TEXT DEFAULT '0',
reported_object_count INTEGER DEFAULT 0,
reported_bytes_used INTEGER DEFAULT 0,
hash TEXT default '00000000000000000000000000000000',
id TEXT,
status TEXT DEFAULT '',
status_changed_at TEXT DEFAULT '0',
metadata TEXT DEFAULT ''
);
INSERT INTO container_stat (object_count, bytes_used)
VALUES (0, 0);
""")
conn.execute('''
UPDATE container_stat
SET account = ?, container = ?, created_at = ?, id = ?,
put_timestamp = ?
''', (self.account, self.container, normalize_timestamp(time()),
str(uuid4()), put_timestamp))
class TestContainerBrokerBeforeXSync(TestContainerBroker):
"""
Tests for swift.common.db.ContainerBroker against databases created before
the x_container_sync_point[12] columns were added.
"""
def setUp(self):
self._imported_create_container_stat_table = \
ContainerBroker.create_container_stat_table
ContainerBroker.create_container_stat_table = \
prexsync_create_container_stat_table
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
exc = None
with broker.get() as conn:
try:
conn.execute('''SELECT x_container_sync_point1
FROM container_stat''')
except BaseException, err:
exc = err
self.assert_('no such column: x_container_sync_point1' in str(exc))
def tearDown(self):
ContainerBroker.create_container_stat_table = \
self._imported_create_container_stat_table
broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
conn.execute('SELECT x_container_sync_point1 FROM container_stat')
class TestAccountBroker(unittest.TestCase):
""" Tests for swift.common.db.AccountBroker """
def test_creation(self):
""" Test swift.common.db.AccountBroker.__init__ """
broker = AccountBroker(':memory:', account='a')
self.assertEqual(broker.db_file, ':memory:')
got_exc = False
try:
with broker.get() as conn:
pass
except Exception:
got_exc = True
self.assert_(got_exc)
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
curs = conn.cursor()
curs.execute('SELECT 1')
self.assertEqual(curs.fetchall()[0][0], 1)
def test_exception(self):
""" Test swift.common.db.AccountBroker throwing a conn away after
exception """
first_conn = None
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
first_conn = conn
try:
with broker.get() as conn:
self.assertEquals(first_conn, conn)
raise Exception('OMG')
except Exception:
pass
self.assert_(broker.conn is None)
def test_empty(self):
""" Test swift.common.db.AccountBroker.empty """
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
self.assert_(broker.empty())
broker.put_container('o', normalize_timestamp(time()), 0, 0, 0)
self.assert_(not broker.empty())
sleep(.00001)
broker.put_container('o', 0, normalize_timestamp(time()), 0, 0)
self.assert_(broker.empty())
def test_reclaim(self):
broker = AccountBroker(':memory:', account='test_account')
broker.initialize(normalize_timestamp('1'))
broker.put_container('c', normalize_timestamp(time()), 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 1)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 0)
broker.reclaim(normalize_timestamp(time() - 999), time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 1)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 0)
sleep(.00001)
broker.put_container('c', 0, normalize_timestamp(time()), 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 1)
broker.reclaim(normalize_timestamp(time() - 999), time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 1)
sleep(.00001)
broker.reclaim(normalize_timestamp(time()), time())
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 0)
# Test reclaim after deletion. Create 3 test containers
broker.put_container('x', 0, 0, 0, 0)
broker.put_container('y', 0, 0, 0, 0)
broker.put_container('z', 0, 0, 0, 0)
res = broker.reclaim(normalize_timestamp(time()), time())
# self.assertEquals(len(res), 2)
# self.assert_(isinstance(res, tuple))
# containers, account_name = res
# self.assert_(containers is None)
# self.assert_(account_name is None)
# Now delete the account
broker.delete_db(normalize_timestamp(time()))
res = broker.reclaim(normalize_timestamp(time()), time())
# self.assertEquals(len(res), 2)
# self.assert_(isinstance(res, tuple))
# containers, account_name = res
# self.assertEquals(account_name, 'test_account')
# self.assertEquals(len(containers), 3)
# self.assert_('x' in containers)
# self.assert_('y' in containers)
# self.assert_('z' in containers)
# self.assert_('a' not in containers)
def test_delete_container(self):
""" Test swift.common.db.AccountBroker.delete_container """
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
broker.put_container('o', normalize_timestamp(time()), 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 1)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 0)
sleep(.00001)
broker.put_container('o', 0, normalize_timestamp(time()), 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 0").fetchone()[0], 0)
self.assertEquals(conn.execute(
"SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 1)
def test_get_container_timestamp(self):
""" Test swift.common.db.AccountBroker.get_container_timestamp """
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
# Create initial container
timestamp = normalize_timestamp(time())
broker.put_container('container_name', timestamp, 0, 0, 0)
# test extant map
ts = broker.get_container_timestamp('container_name')
self.assertEquals(ts, timestamp)
# test missing map
ts = broker.get_container_timestamp('something else')
self.assertEquals(ts, None)
def test_put_container(self):
""" Test swift.common.db.AccountBroker.put_container """
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
# Create initial container
timestamp = normalize_timestamp(time())
broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT put_timestamp FROM container").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 0)
# Reput same event
broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT put_timestamp FROM container").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 0)
# Put new event
sleep(.00001)
timestamp = normalize_timestamp(time())
broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT put_timestamp FROM container").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 0)
# Put old event
otimestamp = normalize_timestamp(float(timestamp) - 1)
broker.put_container('"{<container \'&\' name>}"', otimestamp, 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT put_timestamp FROM container").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 0)
# Put old delete event
dtimestamp = normalize_timestamp(float(timestamp) - 1)
broker.put_container('"{<container \'&\' name>}"', 0, dtimestamp, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT put_timestamp FROM container").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT delete_timestamp FROM container").fetchone()[0],
dtimestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 0)
# Put new delete event
sleep(.00001)
timestamp = normalize_timestamp(time())
broker.put_container('"{<container \'&\' name>}"', 0, timestamp, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT delete_timestamp FROM container").fetchone()[0],
timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 1)
# Put new event
sleep(.00001)
timestamp = normalize_timestamp(time())
broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0)
with broker.get() as conn:
self.assertEquals(conn.execute(
"SELECT name FROM container").fetchone()[0],
'"{<container \'&\' name>}"')
self.assertEquals(conn.execute(
"SELECT put_timestamp FROM container").fetchone()[0], timestamp)
self.assertEquals(conn.execute(
"SELECT deleted FROM container").fetchone()[0], 0)
def test_get_info(self):
""" Test swift.common.db.AccountBroker.get_info """
broker = AccountBroker(':memory:', account='test1')
broker.initialize(normalize_timestamp('1'))
info = broker.get_info()
self.assertEquals(info['account'], 'test1')
self.assertEquals(info['hash'], '00000000000000000000000000000000')
info = broker.get_info()
self.assertEquals(info['container_count'], 0)
broker.put_container('c1', normalize_timestamp(time()), 0, 0, 0)
info = broker.get_info()
self.assertEquals(info['container_count'], 1)
sleep(.00001)
broker.put_container('c2', normalize_timestamp(time()), 0, 0, 0)
info = broker.get_info()
self.assertEquals(info['container_count'], 2)
sleep(.00001)
broker.put_container('c2', normalize_timestamp(time()), 0, 0, 0)
info = broker.get_info()
self.assertEquals(info['container_count'], 2)
sleep(.00001)
broker.put_container('c1', 0, normalize_timestamp(time()), 0, 0)
info = broker.get_info()
self.assertEquals(info['container_count'], 1)
sleep(.00001)
broker.put_container('c2', 0, normalize_timestamp(time()), 0, 0)
info = broker.get_info()
self.assertEquals(info['container_count'], 0)
def test_list_containers_iter(self):
""" Test swift.common.db.AccountBroker.list_containers_iter """
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
for cont1 in xrange(4):
for cont2 in xrange(125):
broker.put_container('%d/%04d' % (cont1, cont2),
normalize_timestamp(time()), 0, 0, 0)
for cont in xrange(125):
broker.put_container('2/0051/%04d' % cont,
normalize_timestamp(time()), 0, 0, 0)
for cont in xrange(125):
broker.put_container('3/%04d/0049' % cont,
normalize_timestamp(time()), 0, 0, 0)
listing = broker.list_containers_iter(100, '', None, None, '')
self.assertEquals(len(listing), 100)
self.assertEquals(listing[0][0], '0/0000')
self.assertEquals(listing[-1][0], '0/0099')
listing = broker.list_containers_iter(100, '', '0/0050', None, '')
self.assertEquals(len(listing), 51)
self.assertEquals(listing[0][0], '0/0000')
self.assertEquals(listing[-1][0], '0/0050')
listing = broker.list_containers_iter(100, '0/0099', None, None, '')
self.assertEquals(len(listing), 100)
self.assertEquals(listing[0][0], '0/0100')
self.assertEquals(listing[-1][0], '1/0074')
listing = broker.list_containers_iter(55, '1/0074', None, None, '')
self.assertEquals(len(listing), 55)
self.assertEquals(listing[0][0], '1/0075')
self.assertEquals(listing[-1][0], '2/0004')
listing = broker.list_containers_iter(10, '', None, '0/01', '')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '0/0100')
self.assertEquals(listing[-1][0], '0/0109')
listing = broker.list_containers_iter(10, '', None, '0/01', '/')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '0/0100')
self.assertEquals(listing[-1][0], '0/0109')
listing = broker.list_containers_iter(10, '', None, '0/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '0/0000')
self.assertEquals(listing[-1][0], '0/0009')
listing = broker.list_containers_iter(10, '', None, '', '/')
self.assertEquals(len(listing), 4)
self.assertEquals([row[0] for row in listing],
['0/', '1/', '2/', '3/'])
listing = broker.list_containers_iter(10, '2/', None, None, '/')
self.assertEquals(len(listing), 1)
self.assertEquals([row[0] for row in listing], ['3/'])
listing = broker.list_containers_iter(10, '', None, '2', '/')
self.assertEquals(len(listing), 1)
self.assertEquals([row[0] for row in listing], ['2/'])
listing = broker.list_containers_iter(10, '2/0050', None, '2/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals(listing[0][0], '2/0051')
self.assertEquals(listing[1][0], '2/0051/')
self.assertEquals(listing[2][0], '2/0052')
self.assertEquals(listing[-1][0], '2/0059')
listing = broker.list_containers_iter(10, '3/0045', None, '3/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['3/0045/', '3/0046', '3/0046/', '3/0047',
'3/0047/', '3/0048', '3/0048/', '3/0049',
'3/0049/', '3/0050'])
broker.put_container('3/0049/', normalize_timestamp(time()), 0, 0, 0)
listing = broker.list_containers_iter(10, '3/0048', None, None, None)
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['3/0048/0049', '3/0049', '3/0049/', '3/0049/0049',
'3/0050', '3/0050/0049', '3/0051', '3/0051/0049',
'3/0052', '3/0052/0049'])
listing = broker.list_containers_iter(10, '3/0048', None, '3/', '/')
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['3/0048/', '3/0049', '3/0049/', '3/0050',
'3/0050/', '3/0051', '3/0051/', '3/0052',
'3/0052/', '3/0053'])
listing = broker.list_containers_iter(10, None, None, '3/0049/', '/')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing],
['3/0049/', '3/0049/0049'])
def test_double_check_trailing_delimiter(self):
""" Test swift.common.db.AccountBroker.list_containers_iter for an
account that has an odd file with a trailing delimiter """
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
broker.put_container('a', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('a/', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('a/a', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('a/a/a', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('a/a/b', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('a/b', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('b', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('b/a', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('b/b', normalize_timestamp(time()), 0, 0, 0)
broker.put_container('c', normalize_timestamp(time()), 0, 0, 0)
listing = broker.list_containers_iter(15, None, None, None, None)
self.assertEquals(len(listing), 10)
self.assertEquals([row[0] for row in listing],
['a', 'a/', 'a/a', 'a/a/a', 'a/a/b', 'a/b', 'b',
'b/a', 'b/b', 'c'])
listing = broker.list_containers_iter(15, None, None, '', '/')
self.assertEquals(len(listing), 5)
self.assertEquals([row[0] for row in listing],
['a', 'a/', 'b', 'b/', 'c'])
listing = broker.list_containers_iter(15, None, None, 'a/', '/')
self.assertEquals(len(listing), 4)
self.assertEquals([row[0] for row in listing],
['a/', 'a/a', 'a/a/', 'a/b'])
listing = broker.list_containers_iter(15, None, None, 'b/', '/')
self.assertEquals(len(listing), 2)
self.assertEquals([row[0] for row in listing], ['b/a', 'b/b'])
def test_chexor(self):
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
broker.put_container('a', normalize_timestamp(1),
normalize_timestamp(0), 0, 0)
broker.put_container('b', normalize_timestamp(2),
normalize_timestamp(0), 0, 0)
hasha = hashlib.md5('%s-%s' %
('a', '0000000001.00000-0000000000.00000-0-0')
).digest()
hashb = hashlib.md5('%s-%s' %
('b', '0000000002.00000-0000000000.00000-0-0')
).digest()
hashc = \
''.join(('%02x' % (ord(a)^ord(b)) for a, b in zip(hasha, hashb)))
self.assertEquals(broker.get_info()['hash'], hashc)
broker.put_container('b', normalize_timestamp(3),
normalize_timestamp(0), 0, 0)
hashb = hashlib.md5('%s-%s' %
('b', '0000000003.00000-0000000000.00000-0-0')
).digest()
hashc = \
''.join(('%02x' % (ord(a)^ord(b)) for a, b in zip(hasha, hashb)))
self.assertEquals(broker.get_info()['hash'], hashc)
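# Standalone Python 3 sketch (not part of swift; `chexor_sketch` is a
# hypothetical name) of the hash combination the assertions above rebuild:
# each record hashes '<name>-<put>-<delete>-<objects>-<bytes>' with md5 and
# the per-record digests are XORed together, so the result is independent
# of merge order.
#
# def chexor_sketch(records):
#     combined = [0] * 16
#     for name, meta in records:
#         digest = hashlib.md5(('%s-%s' % (name, meta)).encode()).digest()
#         combined = [a ^ b for a, b in zip(combined, digest)]
#     return ''.join('%02x' % byte for byte in combined)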
def test_merge_items(self):
broker1 = AccountBroker(':memory:', account='a')
broker1.initialize(normalize_timestamp('1'))
broker2 = AccountBroker(':memory:', account='a')
broker2.initialize(normalize_timestamp('1'))
broker1.put_container('a', normalize_timestamp(1), 0, 0, 0)
broker1.put_container('b', normalize_timestamp(2), 0, 0, 0)
id = broker1.get_info()['id']
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(len(items), 2)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
broker1.put_container('c', normalize_timestamp(3), 0, 0, 0)
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEquals(len(items), 3)
self.assertEquals(['a', 'b', 'c'],
sorted([rec['name'] for rec in items]))
def premetadata_create_account_stat_table(self, conn, put_timestamp):
"""
Copied from swift.common.db.AccountBroker before the metadata column was
added; used for testing with TestAccountBrokerBeforeMetadata.
Create account_stat table which is specific to the account DB.
:param conn: DB connection object
:param put_timestamp: put timestamp
"""
conn.executescript("""
CREATE TABLE account_stat (
account TEXT,
created_at TEXT,
put_timestamp TEXT DEFAULT '0',
delete_timestamp TEXT DEFAULT '0',
container_count INTEGER,
object_count INTEGER DEFAULT 0,
bytes_used INTEGER DEFAULT 0,
hash TEXT default '00000000000000000000000000000000',
id TEXT,
status TEXT DEFAULT '',
status_changed_at TEXT DEFAULT '0'
);
INSERT INTO account_stat (container_count) VALUES (0);
""")
conn.execute('''
UPDATE account_stat SET account = ?, created_at = ?, id = ?,
put_timestamp = ?
''', (self.account, normalize_timestamp(time()), str(uuid4()),
put_timestamp))
class TestAccountBrokerBeforeMetadata(TestAccountBroker):
"""
Tests for swift.common.db.AccountBroker against databases created before
the metadata column was added.
"""
def setUp(self):
self._imported_create_account_stat_table = \
AccountBroker.create_account_stat_table
AccountBroker.create_account_stat_table = \
premetadata_create_account_stat_table
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
exc = None
with broker.get() as conn:
try:
conn.execute('SELECT metadata FROM account_stat')
except BaseException, err:
exc = err
self.assert_('no such column: metadata' in str(exc))
def tearDown(self):
AccountBroker.create_account_stat_table = \
self._imported_create_account_stat_table
broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1'))
with broker.get() as conn:
conn.execute('SELECT metadata FROM account_stat')
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
d703574693e9918ade0ab71c8afb819c584424ab | cb8431a306af2fabf37b74f68b5bd3fdc4cae134 | /etlt/dimension/RegularDimension.py | 9eca77137611940200b8f332dbd90e76a4cd545a | [
"MIT"
] | permissive | e7dal/py-etlt | fb5fcc25cd5ab33c6d02f37ab8421aefe877753c | 1c5b8ea60293c14f54d7845a9fe5c595021f66f2 | refs/heads/master | 2020-05-16T19:12:27.838844 | 2019-04-10T10:02:22 | 2019-04-10T10:02:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,834 | py | """
ETLT
Copyright 2016 Set Based IT Consultancy
Licence MIT
"""
import abc
class RegularDimension(metaclass=abc.ABCMeta):
"""
Abstract parent class for translating natural key to a technical key of a regular dimension.
"""
# ------------------------------------------------------------------------------------------------------------------
def __init__(self):
"""
Object constructor.
"""
self._map = {}
"""
The map from natural keys to technical keys.
:type: dict[T, int|None]
"""
# Pre-load look up data in to the map.
self.pre_load_data()
# ------------------------------------------------------------------------------------------------------------------
def get_id(self, natural_key, enhancement=None):
"""
Returns the technical ID for a natural key or None if the given natural key is not valid.
:param T natural_key: The natural key.
:param T enhancement: Enhancement data of the dimension row.
:rtype: int|None
"""
# If the natural key is known return the technical ID immediately.
if natural_key in self._map:
return self._map[natural_key]
# The natural key is not in the map of this dimension. Call a stored procedure for translating the natural key
# to a technical key.
self.pre_call_stored_procedure()
success = False
try:
key = self.call_stored_procedure(natural_key, enhancement)
success = True
finally:
self.post_call_stored_procedure(success)
# Add the translation for natural key to technical ID to the map.
self._map[natural_key] = key
return key
# ------------------------------------------------------------------------------------------------------------------
@abc.abstractmethod
def call_stored_procedure(self, natural_key, enhancement):
"""
Calls a stored procedure for getting the technical key of a natural key. Returns the technical ID or None if
the given natural key is not valid.
:param T natural_key: The natural key.
:param T enhancement: Enhancement data of the dimension row.
:rtype: int|None
"""
raise NotImplementedError()
# ------------------------------------------------------------------------------------------------------------------
def pre_load_data(self):
"""
Can be overridden to pre-load lookup data from a dimension table.
:rtype: None
"""
pass
# ------------------------------------------------------------------------------------------------------------------
def pre_call_stored_procedure(self):
"""
This method is invoked before calling the stored procedure for getting the technical key of a natural key.
In a concurrent environment override this method to acquire a lock on the dimension or dimension hierarchy.
:rtype: None
"""
pass
# ------------------------------------------------------------------------------------------------------------------
def post_call_stored_procedure(self, success):
"""
This method is invoked after calling the stored procedure for getting the technical key of a natural key.
In a concurrent environment override this method to release a lock on the dimension or dimension hierarchy and
to commit or rollback the transaction.
:param bool success: True: the stored procedure is executed successfully. False: an exception has occurred.
:rtype: None
"""
pass
# ----------------------------------------------------------------------------------------------------------------------
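# Hypothetical usage sketch (not part of this module): a concrete dimension
# whose "stored procedure" is a plain in-memory dict, to show the caching in
# get_id(). Names and data below are illustrative only.
#
# class CountryDimension(RegularDimension):
#     _rows = {'NL': 1, 'BE': 2}
#
#     def call_stored_procedure(self, natural_key, enhancement):
#         return self._rows.get(natural_key)
#
# dim = CountryDimension()
# dim.get_id('NL')  # -> 1 (looked up once, then served from self._map)
# dim.get_id('XX')  # -> None (invalid keys are cached as None as well)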
| [
"[email protected]"
] | |
d8f484ff8803152cae0d30c0990aa7841c72c689 | cad2908abb7b2a649ec2382309f56e6c95ee834a | /course3/principles/inheritance.py | dcfa43ec8c6726bfa54b00ffde8e352d592ec16b | [] | no_license | andyzt/tceh-python | 619b6eec0897e3b3671d416d6eb7346f69730747 | de74cb7fffea3528cd2a3035b0a9d53c9dca0c6b | refs/heads/master | 2021-01-16T00:17:02.198730 | 2016-02-26T16:00:16 | 2016-02-26T16:00:16 | 52,616,472 | 4 | 3 | null | 2016-02-26T16:15:06 | 2016-02-26T16:15:06 | null | UTF-8 | Python | false | false | 1,578 | py | # -*- coding: utf-8 -*-
from __future__ import print_function
__author__ = 'sobolevn'
class Parent(object):
def __init__(self):
print('Parent inited')
self.value = 'Parent'
def do(self):
print('Parent do(): %s' % self.value)
@staticmethod
def static_do():
print('Parent static_do()')
@classmethod
def class_do(cls):
print('Parent class_do(): %s' % cls)
class Child(Parent):
def __init__(self):
super(Child, self).__init__()
print('Child inited')
self.value = 'Child'
@staticmethod
def static_do():
print('Child static_do()')
class Mixin(object):
@classmethod
def class_do(cls):
print('Mixed: %s' % cls)
class MixedChildOne(Parent, Mixin):
pass
class MixedChildTwo(Mixin, Parent):
pass
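# Method resolution order note: MixedChildOne lists Parent first, so
# Parent.class_do is found before Mixin's; MixedChildTwo lists Mixin first,
# so Mixin.class_do wins. The __mro__ printouts in __main__ below make this
# lookup order visible.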
class MixedChildThree(Parent, Mixin):
@classmethod
def class_do(cls):
Mixin.class_do()
if __name__ == '__main__':
Parent.static_do()
Parent.class_do()
parent = Parent()
parent.do()
Parent.do(parent) # do not use this!
parent.class_do()
parent.static_do()
parent.__class__.class_do()
parent.__class__.static_do()
# Child:
Child.static_do()
Child.class_do()
child = Child()
child.do()
# Mixins:
mixin1 = MixedChildOne()
mixin1.class_do()
print(mixin1.__class__.__mro__)
mixin2 = MixedChildTwo()
mixin2.class_do()
print(mixin2.__class__.__mro__)
mixin3 = MixedChildThree()
mixin3.class_do()
print(mixin3.__class__.__mro__)
| [
"[email protected]"
] | |
83cafd706a1d76e745e3773b8311bb5fe4844a10 | 594bd1d3afa4c74c577e6c5d7f8e71d4835c7734 | /MainApp/migrations/0093_auto_20161118_1232.py | a9c950e213543791b6e39e28c16dcc7152a95d9b | [] | no_license | CoriAle/app | 5a930b4460a5a79e4d2d97b0de205c050c196a53 | 280313f86db0ba9a754ff52dc8a37bf6420554d1 | refs/heads/master | 2023-01-23T22:45:15.127029 | 2018-07-03T01:59:19 | 2018-07-03T01:59:19 | 136,048,276 | 0 | 0 | null | 2023-01-12T05:12:39 | 2018-06-04T15:49:01 | JavaScript | UTF-8 | Python | false | false | 481 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-11-18 18:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('MainApp', '0092_remove_pagopersonal_vales'),
]
operations = [
migrations.AlterField(
model_name='persona',
name='fecha_pago',
field=models.DateTimeField(auto_now_add=True, null=True),
),
]
| [
"[email protected]"
] | |
444343a37dcff36a26a64f7da7ac99a29f078c08 | 0b4d1fb57546adbc85659a144742c4ecd9dfe219 | /src/genie/libs/parser/ios/tests/ShowInterfacesSwitchport/cli/equal/golden_output_2_expected.py | 5515be0b5806e40b6a2d632d373a55a7ec3474a5 | [
"Apache-2.0"
] | permissive | oboehmer/genieparser | dcc4fd0c6611ab4d799928ce6d2b55a2ad7a64d2 | e88d02c08a3968d38ba90121b46af614715c5ecc | refs/heads/master | 2021-07-11T17:04:11.198119 | 2020-12-02T20:34:24 | 2020-12-02T20:34:24 | 222,627,198 | 1 | 0 | Apache-2.0 | 2019-11-19T06:43:15 | 2019-11-19T06:43:14 | null | UTF-8 | Python | false | false | 1,826 | py | expected_output = {
"Port-channel12": {
"operational_mode": "trunk",
"switchport_mode": "trunk",
"access_vlan_name": "default",
"private_vlan": {},
"switchport_enable": True,
"native_vlan_tagging": True,
"negotiation_of_trunk": False,
"encapsulation": {
"native_vlan": "1",
"native_vlan_name": "default",
"operational_encapsulation": "dot1q",
"administrative_encapsulation": "dot1q",
},
"port_channel": {
"port_channel_member_intfs": ["TenGigabitEthernet1/1/2"],
"port_channel_member": True,
},
"pruning_vlans": "2-1001",
"access_vlan": "1",
"unknown_multicast_blocked": False,
"trunk_vlans": "1,111,130,131,400,405,410,420,430,439-442,450,451,460,",
"unknown_unicast_blocked": False,
},
"TenGigabitEthernet1/1/2": {
"access_vlan": "1",
"operational_mode": "trunk",
"switchport_mode": "trunk",
"access_vlan_name": "default",
"switchport_enable": True,
"private_vlan": {},
"capture_mode": False,
"trunk_vlans": "1,111,130,131,400,405,410,420,430,439-442,450,451,460,",
"capture_vlans": "all",
"negotiation_of_trunk": False,
"unknown_multicast_blocked": False,
"port_channel": {
"port_channel_int": "Port-channel12",
"port_channel_member": True,
},
"native_vlan_tagging": True,
"encapsulation": {
"native_vlan": "1",
"native_vlan_name": "default",
"operational_encapsulation": "dot1q",
"administrative_encapsulation": "dot1q",
},
"unknown_unicast_blocked": False,
"pruning_vlans": "2-1001",
},
}
| [
"[email protected]"
] | |
04eed828b4817621a1725d5e816cab6a74e057de | bdb2506fb9562005c2f1b4c88330fa108f6219db | /appliedunis/urls.py | c7c7f205168a25ffeb28c2ff5a8af1b3568084a0 | [] | no_license | naeem23/University-Admission-Assistant---UAA | abc68766585d8a4e69de142cd077ad3a1c372162 | 744f14b5bbdd1ff96c6a01967946278813a5b6db | refs/heads/master | 2023-05-13T00:15:11.138977 | 2019-08-04T13:24:58 | 2019-08-04T13:24:58 | 200,491,140 | 1 | 0 | null | 2023-04-21T20:35:21 | 2019-08-04T12:39:09 | Python | UTF-8 | Python | false | false | 372 | py | from .import views
from django.contrib.auth.decorators import login_required
from django.urls import path, re_path
app_name = 'appliedunis'
urlpatterns = [
path('api/cancel', login_required(views.cancelApi), name='delete_api'),
path('api/apply/', login_required(views.applyApi), name='api_uni'),
path('api/read/', login_required(views.markAsRead), name='read'),
]
| [
"[email protected]"
] | |
c2c4dda9f0a19e4d5ad541b09f146e69e4f014f5 | 4d939137a35d9bcf118ebfd5fe5db6157df93b99 | /index_update_mq/goods_data_updateOld.py | 74a4ba57cf6ce79b99d8311cae5b02fae7571eb0 | [] | no_license | xiejunbiao/search_goods | 4f91a1e5891d4553914a4bdc9e65f667c802dcfb | 7aca0da499800777858d5e862bee95e538f29432 | refs/heads/master | 2022-12-12T00:57:18.631340 | 2020-08-26T13:38:25 | 2020-08-26T13:38:25 | 290,495,212 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,387 | py | import os, datetime
from whoosh.index import create_in, open_dir
from whoosh.writing import AsyncWriter
from searchhotmain.index_create import cshi
from whoosh.fields import Schema, TEXT, ID, NUMERIC, STORED
from searchmatch.index_update_cre import iu
# The search and hot-search features share one index; keep it synchronized
# pathDir = os.path.dirname(__file__)
# pathDir=os.path.dirname(os.getcwd())  # parent directory --- when the main program in the top-level directory is run, this command outputs the level ABOVE the top-level directory!
pathDir=os.path.abspath(os.path.dirname(os.path.dirname(__file__)))  ## this is the actual parent directory of this file
# print(pathDir)
path_index_online = os.path.join(pathDir, 'searchhotmain', 'index_all', 'index_online', 'index')
path_index_spu_search = os.path.join(pathDir, 'searchmatch', 'index_all', 'index_spu_search', 'index')
def delete_search(area_code, goods_data_dict):
path_index_spu_search_ac = path_index_spu_search + "_" + area_code
if os.path.exists(path_index_spu_search_ac):
ix_online = open_dir(path_index_spu_search_ac)
# writer=ix_online.writer()
writer = AsyncWriter(ix_online)
writer.delete_by_term(fieldname='spu_code', text=goods_data_dict["spuCode"])
# writer.delete_by_term('spu_code', spu_code)
writer.commit(optimize=True) # two seg files still remain
else:
pass
def delete_hot(area_code, goods_data_dict):
path_index_online_ac = path_index_online + "_" + area_code
if os.path.exists(path_index_online_ac):
ix_online = open_dir(path_index_online_ac)
# writer=ix_online.writer()
writer = AsyncWriter(ix_online)
writer.delete_by_term(fieldname='spu_code', text=goods_data_dict["spuCode"])
writer.commit(optimize=True) # two seg files still remain
"""
Update the final hot-search filter index
"""
cshi.create_index_hot_filter(area_code)
else:
pass
def add_search(area_code, goods_data_dict):
path_index_spu_search_ac = path_index_spu_search + "_" + area_code
if not os.path.exists(path_index_spu_search_ac):
os.makedirs(path_index_spu_search_ac)
ix = create_in(path_index_spu_search_ac,
iu.schema_spu_search) # after running it twice, the earlier error appeared again! PermissionError: [Errno 13] Permission denied
else:
ix=open_dir(path_index_spu_search_ac)
writer = AsyncWriter(ix)
updated_time_t = iu.getmtime_of_timestamp(str(goods_data_dict['updatedTimeDot']))
writer.delete_by_term(fieldname='spu_code', text=goods_data_dict["spuCode"])
# writer.commit(optimize=True) # two seg files still remain
writer.add_document(updated_time_dot=updated_time_t
, spu_code=goods_data_dict['spuCode']
, spu_name=goods_data_dict['spuName']
, shop_name=goods_data_dict['shopName']
, goods_brand=goods_data_dict['goodsBrand']
, goods_short_edit=goods_data_dict['goodsShortEdit']
, spu_cate_first=goods_data_dict['spuCateFirst']
, spu_cate_second=goods_data_dict['spuCateSecond']
, spu_cate_third=goods_data_dict['spuCateThird']
, spu_cate_third_edit=goods_data_dict['spuCateThirdEdit']
, shop_code=goods_data_dict['shopCode']
, sale_month_count=goods_data_dict['saleMonthCount']
, sale_price=goods_data_dict['salePrice'])
writer.commit(optimize=True) # two seg files still remain
def add_hot(area_code, goods_data_dict):
path_index_online_ac = path_index_online + "_" + area_code
if not os.path.exists(path_index_online_ac):
os.makedirs(path_index_online_ac)
ix = create_in(path_index_online_ac,
cshi.schema_online) # after running it twice, the earlier error appeared again! PermissionError: [Errno 13] Permission denied
else:
ix=open_dir(path_index_online_ac)
writer = AsyncWriter(ix)
# updated_time_t = iu.getmtime_of_timestamp(str(goods_data_dict['updatedTimeDot']))
writer.delete_by_term(fieldname='spu_code', text=goods_data_dict["spuCode"])
# writer.commit(optimize=True) # two seg files still remain
writer.add_document(spu_code=goods_data_dict['spuCode'],
goods_short=goods_data_dict['goodsShortEdit']
)
writer.commit(optimize=True) # two seg files still remain
"""
Update the final hot-search filter index
"""
cshi.create_index_hot_filter(area_code)
def update_hot(goods_data_dict, hot_or_search):
if goods_data_dict["goodsStatus"] == '0':
for area_code in goods_data_dict["areaCodes"]:
if hot_or_search:
delete_hot(area_code, goods_data_dict)
else:
delete_search(area_code, goods_data_dict)
else:
# loop over the residential areas
area_codes_old=goods_data_dict["areaCodesOld"]
if len(area_codes_old)==0:
pass
else:
for area_code in area_codes_old:
if hot_or_search:
delete_hot(area_code, goods_data_dict)
else:
delete_search(area_code, goods_data_dict)
area_codes=goods_data_dict["areaCodes"]
if len(area_codes)==0:
pass
else:
for area_code in area_codes:
if hot_or_search:
add_hot(area_code, goods_data_dict)
else:
add_search(area_code, goods_data_dict)
# loop over the residential areas
# if len(goods_data_dict["areaCodesOld"]) == 0: # verify whether the value passed over by Liu Zunyan is actually a list?
# if not os.path.exists(path_index_online):
# os.makedirs(path_index_online)
# ix = create_in(path_index_online,
# iu.schema_spu_search) # after running it twice, the earlier error appeared again! PermissionError: [Errno 13] Permission denied
#
# writer = AsyncWriter(ix)
# updated_time_t = iu.getmtime_of_timestamp(str(goods_data_dict['updatedTimeDot']))
# writer.add_document(updated_time_dot=updated_time_t
# , spu_code=goods_data_dict['spuCode']
# , spu_name=goods_data_dict['spuName']
# , shop_name=goods_data_dict['shopName']
# , goods_short_edit=goods_data_dict['goodsShortEdit']
# , spu_cate_third_edit=goods_data_dict['spuCateThirdEdit']
# , shop_code=goods_data_dict['shopCode']
# , sale_month_count=goods_data_dict['saleMonthCount']
# , sale_price=goods_data_dict['salePrice'])
# writer.commit(optimize=True) # two seg files still remain
# else:
#
# ix_online = open_dir(path_index_online)
# # writer=ix_online.writer()
# writer = AsyncWriter(ix_online)
# if goods_status == '0':
# writer.delete_by_term(fieldname='spu_code', text=spu_code)
# # writer.delete_by_term(fieldname='spu_code', text='skjdalkjfalj')  ## deleting a term that does not exist also works
# else:
# writer.update_document(spu_code=spu_code, goods_short=goods_short,
# goods_brand='测试品牌1') # to keep per-area delivery data in sync: delete first, then add
# writer.commit(optimize=True)
def update_status_online_index(goods_data_dict):
# add index synchronization for goods search
# area_codes=goods_data_dict["areaCodes"]
# spu_code=goods_data_dict["spuCode"]
# goods_short=goods_data_dict["goodsShortEdit"]
# goods_status=goods_data_dict["goodsStatus"]
#
# area_codes_old = goods_data_dict["areaCodesOld"]
# spuName = goods_data_dict["spuName"]
# spuName, goods_short_edit, goods_brand, spu_cate_first, spu_cate_second, spu_cate_third,
# spu_cate_third_edit, sale_price, sale_month_count, shop_name, shop_code, updated_time_dot
# for area_code in goods_data_dict["areaCodes"]:
# path_index_online=self.path_index_online+"_"+area_code
"""
Note: check whether the index exists; if it does not, skip.
If it does not exist, it may be a new residential area -- should a full rebuild be done?
First-time listing differs from re-listing: on the first listing, the area delivery scope does not exist yet.
==
Parameters: spu_code, the old area scope areaCodesOld (to delete), the new area scope areaCodes (to add), listed = 1, delisted = 0.
(1) Goods listed (1): on the first listing areaCodesOld is empty; on a re-listing areaCodesOld is non-empty.
When it is empty there is nothing to delete -- add directly (create the index).
(2) When it is non-empty, delete first and then add (open the index). When deleting, first check whether the index directory exists; if it does not, ignore it. When adding, first check whether the index directory exists; if it does not, create it.
Area updates only happen with goods_status=1 and follow the same logic: when empty, add directly without deleting; when non-empty, delete first and then add.
(3) Goods delisted (0): simply delete according to the new areaCodes. First check whether the index directory exists; if it does not, ignore it.
(4) Will MySQL run a daily delete of delisted goods? Then how would the edit field be obtained?? Delisted goods should not be deleted on a schedule -- commented out. Do not pass the edit field for now; I keep it in sync with a scheduled update. The edit field should be backed up.
"""
update_hot(goods_data_dict, hot_or_search=True)
update_hot(goods_data_dict, hot_or_search=False)
def update_index_online(spuDict):
"""
Update the online index
"""
"""
Update listing/delisting information -- online index
delete on removal
update on change
Only delisting matters: query whether the delisted item still appears in the hot searches; if it does, simply recompute the hot-search terms once?
The hot-search index (offline) is computed offline and keeps a sufficient reserve of 1000 entries, so it needs no update. Each call takes the top 10, at least 10.
Only the online index table is updated: update on listing, delete on delisting.
"""
"""
Every residential area associated with the spu_code should be updated once.
"""
goods_data_list=spuDict['goodsData']
for goods_data_dict in goods_data_list:
update_status_online_index(goods_data_dict)
def goods_data_update():
# synchronize goods data status updates
print("----update goods data")
keys = ['spuName', 'shopName', 'goodsBrand', 'spuCateFirst', 'spuCateSecond', 'spuCateThird', 'spuCateThirdEdit',
'shopCode', 'saleMonthCount', 'salePrice']
time_loc = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
# spu_code should be a unique index: with add_document, a change in any field (such as the timestamp) inserts multiple duplicate rows with the same spu_code! Use update.
# Marking more fields as "unique" in the schema makes each
# update_document
# call slightly slower.
#
# When you update many documents, bulk-delete all the changed documents first, then use
# add_document
# to add the replacements instead of using
# update_document.
goods_data_dict1 = {"goodsStatus": '1',
"goodsShortEdit": '榴莲',
"spuCode": '407754603128160256', ##删除榴莲
"areaCodes": ['A2018012300015', '-1'],
"areaCodesOld": ['A2018012300015Test', '-1'],
"updatedTimeDot": time_loc}
goods_data_dict2 = {"goodsStatus": '1',
"goodsShortEdit": '榴莲 榴莲 你好测试',
"spuCode": '19', ##擦,终于找到原因了,update的时候,相同的spu_code覆盖了
"areaCodes": ['A2018012300015', '-1'],
"areaCodesOld": ['A2018012300015', '-1Test'],
"updatedTimeDot": time_loc}
for i in keys:
goods_data_dict1[i] = '0'
goods_data_dict2[i] = '0'
## finally found the cause: during update, entries with the same spu_code overwrite each other
goods_data_list = [goods_data_dict1, goods_data_dict2]
# goods_data_list=[goods_data_dict1,goods_data_dict1]
spuDict = {"goodsData": goods_data_list}
update_index_online(spuDict)
if __name__=="__main__":
# goods_data_update()
pass
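# Hedged sketch of the update_document behaviour described in the notes in
# goods_data_update(), using the public whoosh API; the schema and field
# values are illustrative, not the project's real ones, and the index
# directory is assumed to exist.
#
# from whoosh.fields import Schema, ID, TEXT
# from whoosh.index import create_in
#
# schema = Schema(spu_code=ID(unique=True, stored=True), goods_short=TEXT(stored=True))
# ix = create_in('tmp_index_dir', schema)
# w = ix.writer()
# w.update_document(spu_code=u'19', goods_short=u'durian')     # first insert
# w.update_document(spu_code=u'19', goods_short=u'durian v2')  # replaces: spu_code is unique
# w.commit()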
| [
"[email protected]"
] | |
99178d3942f39b0b5f1f5d1954c4b20943bef419 | ff6f60d02ed8d024f7b2db5c9eb4b1196ebf166b | /my_flask/app/models/book.py | 1d81876b0b33e29edad6d19583515c95e0fcb3ff | [] | no_license | cekong/learnit | 43b707e347ff552754b6592e01dd106c98cd0cc5 | b4111d6fee95960f7b7ca5421b7159cb6122ad2a | refs/heads/master | 2020-03-25T13:53:37.848843 | 2019-08-29T06:46:48 | 2019-08-29T06:46:48 | 143,848,485 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,245 | py | ''''''
'''
Model layer
https://coding.imooc.com/lesson/194.html#mid=12779 4-8 Defining the first model class
sqlalchemy
Flask_SQLAlchemy
SQLAlchemy is one of the best-known ORM tools in the Python community, designed for
efficient, high-performance database access, and it implements a complete enterprise-grade persistence model.
An ORM (object-relational mapping) establishes a correspondence between database tables and classes in an object-oriented language.
'''
from sqlalchemy import Column,Integer,String,Time
from app.models.base import db,Base
# Create the database table from code
class Book(Base):
id=Column(Integer,primary_key=True,autoincrement=True)
# autoincrement: the primary key increments automatically
title=Column(String(50),nullable=False)# nullable=False: this column must not be empty
author=Column(String(30),default='佚名')# when empty, defaults to '佚名' (anonymous)
binding = Column(String(20))
publisher=Column(String(50))
price=Column(String(30))
pages=Column(Integer)
pubdate=Column(String(20))
isbn = Column(String(15),nullable=False,unique=True)# unique: this value must be unique, duplicates are not allowed
summary=Column(String(1000))
image = Column(String(50))
# MVC: the M (Model) holds data only
# ORM: object-relational mapping
def sample(self):
pass | [
"[email protected]"
] | |
90a34921333a0a6e00b6e543ba5c3a07f2e7af0c | 2f62291080c180e1f65c15ca300c66e7b75605d3 | /comment/admin.py | 56494bcff01c33399c28e260394c102aee73a006 | [] | no_license | XiaoFei-97/NewBLogSite | 9c3d2a4121a2fd8bc3fe8f2ad42ae1caf297109e | 8f878173eaba82073932811357724536a4c6949f | refs/heads/master | 2020-04-13T02:11:40.620117 | 2019-03-11T07:41:26 | 2019-03-11T07:41:26 | 162,896,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 344 | py | from django.contrib import admin # admin back-office management
from .models import Comment # import the Comment table from this app's models
@admin.register(Comment)
class CommentAdmin(admin.ModelAdmin):
# show the article object, comment text, comment time and commenter in the admin
list_display = ('id', 'content_object', 'text', 'comment_time', 'user')
| [
"[email protected]"
] | |
3947ab0da8b6fc23714ddc19616210640432a080 | 2693c54a5243bb991f5e9ac6aa75b4ce43e3bb22 | /forkan/rl/envs/vae_stack.py | 2fd62da241c285a19d1d41fb8779850af24e72d8 | [
"Unlicense"
] | permissive | llach/forkan | 36f50eda62153b043ec5a6e10513347117635ad9 | 33ae3d48ce6f24fc0c254b93ed3f4b8a767ffea5 | refs/heads/master | 2020-04-01T06:47:26.034989 | 2019-07-21T13:43:03 | 2019-07-21T13:43:03 | 152,964,129 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,506 | py | import logging
import numpy as np
from collections import deque
from forkan.models import VAE
from gym import spaces
from forkan.rl import EnvWrapper
class VAEStack(EnvWrapper):
def __init__(self,
env,
load_from,
k=3,
vae_network='pendulum',
**kwargs,
):
self.logger = logging.getLogger(__name__)
# inheriting from EnvWrapper and passing it an env makes spaces available.
super().__init__(env)
self.k = k
self.v = VAE(load_from=load_from, network=vae_network)
self.observation_space = spaces.Box(low=-np.infty, high=np.infty, shape=(self.v.latent_dim*self.k,),
dtype=np.float)
self.vae_name = self.v.savename
self.q = deque(maxlen=self.k)
self._reset_queue()
def _reset_queue(self):
for _ in range(self.k):
self.q.appendleft([0]*self.v.latent_dim)
def _process(self, obs):
mus, _, _ = self.v.encode(np.expand_dims(obs, 0))
self.q.appendleft(np.squeeze(mus))
def _get_obs(self):
return np.asarray(self.q).flatten()
def step(self, action):
obs, reward, done, info = self.env.step(action)
self._process(obs)
return self._get_obs(), reward, done, info
def reset(self):
self._reset_queue()
obs = self.env.reset()
self._process(obs)
return self._get_obs()
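# Hypothetical usage sketch (environment id and checkpoint name are
# placeholders):
#
# import gym
# env = VAEStack(gym.make('Pendulum-v0'), load_from='pendulum-vae', k=3)
# obs = env.reset()  # flattened stack of the k latest latent mean vectors
# obs, reward, done, info = env.step(env.action_space.sample())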
| [
"[email protected]"
] | |
4ea1c3b4e147e92d48a3e0a9fe66894514555851 | 3649dce8b44c72bbfee56adf4e29ca6c5ba2703a | /code_up1440.py | 17945c10ff51ccfa95b13dbf7aa8299ec216180e | [] | no_license | beOk91/code_up | 03c7aca76e955e3a59d797299749e7fc2457f24a | ca1042ce216cc0a80e9b3d3ad363bc29c4ed7690 | refs/heads/master | 2022-12-06T08:23:00.788315 | 2020-08-20T11:21:59 | 2020-08-20T11:21:59 | 284,844,571 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 383 | py | num=int(input())
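# Read n and then n integers; for each element print, on one line, its
# relation (<, =, >) to every other element of the list.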
num_list=list(map(int,input().strip().split()))
for i in range(num):
print("{}:".format(i+1),end=" ")
for j in range(num):
if i!=j:
if num_list[i]<num_list[j]:
print("<",end=" ")
elif num_list[i]==num_list[j]:
print("=",end=" ")
else:
print(">",end=" ")
print()
| [
"[email protected]"
] | |
a55baf3e9516d59c3250ca8f0d14b799a6376e0d | 9f2445e9a00cc34eebcf3d3f60124d0388dcb613 | /2019-12-10-Na_Chan_del_segfault/seg_fault.py | 78fe3fba59fe42bf74641ff0185ff0171b865e62 | [] | no_license | analkumar2/Thesis-work | 7ee916d71f04a60afbd117325df588908518b7d2 | 75905427c2a78a101b4eed2c27a955867c04465c | refs/heads/master | 2022-01-02T02:33:35.864896 | 2021-12-18T03:34:04 | 2021-12-18T03:34:04 | 201,130,673 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,184 | py | # exec(open('seg_fault.py').read())
import moose
import pylab
import rdesigneur as rd
# Wrapper function so that the model can be built and run again and again
def rdeswrapper():
# Deleting any previous run of the model
try:
# [moose.delete(x) for x in ['/model', '/library']]
moose.delete('/model')
except:
pass
######################################
rdes = rd.rdesigneur(
chanProto = [['make_HH_Na()', 'Na'], ['K_A_Chan_(Migliore2018)_ghk.K_A_Chan()', 'K']],
chanDistrib = [
['K', 'soma', 'Gbar', '2000' ],
['Na', 'soma', 'Gbar', '100' ],],
stimList = [['soma', '1', '.', 'inject', '(t>0.1 && t<0.2) * 1e-8' ]],
plotList = [['soma', '1', '.', 'Vm', 'Membrane potential']]
)
rdes.buildModel()
moose.reinit()
moose.start( 0.3 )
rdes.display()
return rdes
# # Initial run
# print('Initial run')
# rdeswrapper()
# Delete library and run
moose.delete('/library')
print('After library deletion and re-build and re-run')
rdeswrapper()
# Delete Na and run
moose.delete('/library/Na')
print('After library/Na deletion and re-build and re-run')
rdeswrapper()
| [
"[email protected]"
] | |
70a98c32d3373e086b562b057378936237a6b801 | 674f5dde693f1a60e4480e5b66fba8f24a9cb95d | /armulator/armv6/opcodes/concrete/ldrd_literal_t1.py | a9c906d27b400388948fff874fff702b14d09748 | [
"MIT"
] | permissive | matan1008/armulator | 75211c18ebc9cd9d33a02890e76fc649483c3aad | 44f4275ab1cafff3cf7a1b760bff7f139dfffb07 | refs/heads/master | 2023-08-17T14:40:52.793120 | 2023-08-08T04:57:02 | 2023-08-08T04:57:02 | 91,716,042 | 29 | 7 | MIT | 2023-08-08T04:55:59 | 2017-05-18T16:37:55 | Python | UTF-8 | Python | false | false | 605 | py | from armulator.armv6.bits_ops import substring, bit_at
from armulator.armv6.opcodes.abstract_opcodes.ldrd_literal import LdrdLiteral
class LdrdLiteralT1(LdrdLiteral):
@staticmethod
def from_bitarray(instr, processor):
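# T1 encoding layout: imm8 sits in bits 7..0, Rt2 in bits 11..8, Rt in bits
# 15..12 and the add (U) flag at bit 23; imm8 counts words, hence the
# zero-extended byte offset imm32 = imm8 << 2 below.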
imm8 = substring(instr, 7, 0)
rt2 = substring(instr, 11, 8)
rt = substring(instr, 15, 12)
add = bit_at(instr, 23)
imm32 = imm8 << 2
if rt == rt2 or rt in (13, 15) or rt2 in (13, 15) or bit_at(instr, 21):
print('unpredictable')
else:
return LdrdLiteralT1(instr, add=add, imm32=imm32, t=rt, t2=rt2)
| [
"[email protected]"
] | |
ad9b37fcd25235110d2415d4c458f17c588dd19d | b7174170d50b867050c80129dbde239a948f6f85 | /client/filesystem.py | 0b737ab71ebe73a3b6f08c5ad3c40c73f604c827 | [
"MIT"
] | permissive | GreyElaina/pyre-check | c1a2b7a6ee050f606322eaa588f9bd95cd1b3dbc | abcb5daa64c38a25aed9ab238bb61290444ab06c | refs/heads/master | 2022-12-19T21:35:09.582761 | 2020-09-12T05:54:32 | 2020-09-12T05:58:24 | 295,080,507 | 0 | 0 | MIT | 2020-09-13T04:52:19 | 2020-09-13T04:52:18 | null | UTF-8 | Python | false | false | 9,705 | py | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import errno
import fcntl
import functools
import logging
import os
import shutil
import subprocess
from contextlib import contextmanager
from typing import ContextManager, Dict, Generator, Iterable, List, Optional, Set
from .exceptions import EnvironmentException
LOG: logging.Logger = logging.getLogger(__name__)
def assert_readable_directory(directory: str, error_message_prefix: str = "") -> None:
if not os.path.isdir(directory):
raise EnvironmentException(
f"{error_message_prefix}`{directory}` is not a valid directory."
)
if not os.access(directory, os.R_OK):
raise EnvironmentException(
f"{error_message_prefix}`{directory}` is not a readable directory."
)
def readable_directory(directory: str) -> str:
assert_readable_directory(directory)
return directory
def assert_writable_directory(directory: str) -> None:
if not os.path.isdir(directory):
raise EnvironmentException("{} is not a valid directory.".format(directory))
if not os.access(directory, os.W_OK):
raise EnvironmentException("{} is not a writable directory.".format(directory))
def writable_directory(path: str) -> str:
# Create the directory if it does not exist.
try:
os.makedirs(path)
except FileExistsError:
pass
path = os.path.abspath(path)
assert_writable_directory(path)
return path
def translate_path(root: str, path: str) -> str:
if os.path.isabs(path):
return path
translated = os.path.join(root, path)
if os.path.exists(translated):
return os.path.realpath(translated)
return path
def expand_relative_path(root: str, path: str) -> str:
path = os.path.expanduser(path)
if os.path.isabs(path):
return path
else:
return os.path.join(root, path)
def translate_paths(paths: Set[str], original_directory: str) -> Set[str]:
current_directory = os.getcwd()
if not original_directory.startswith(current_directory):
return paths
translation = os.path.relpath(original_directory, current_directory)
if not translation:
return paths
return {translate_path(translation, path) for path in paths}
def exists(path: str) -> str:
if not os.path.isfile(path):
raise ValueError("%s is not a valid file" % path)
return path
def is_parent(parent: str, child: str) -> bool:
return child.startswith(parent.rstrip(os.sep) + os.sep)
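# Illustrative behaviour (editor's sketch, POSIX paths assumed): appending the
# separator before comparing prevents sibling-prefix false positives, e.g.
#   is_parent("/repo", "/repo/src/a.py") -> True
#   is_parent("/repo", "/repository/a.py") -> False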
def find_paths_with_extensions(root: str, extensions: Iterable[str]) -> List[str]:
root = os.path.abspath(root) # Return absolute paths.
extension_filter = []
for extension in extensions:
if len(extension_filter) > 0:
extension_filter.append("-or")
extension_filter.extend(["-name", "*.{}".format(extension)])
output = (
subprocess.check_output(
[
"find",
root,
# All files ending with the given extensions ...
"(",
*extension_filter,
")",
# ... and that are either regular files ...
"(",
"-type",
"f",
"-or",
# ... or symlinks.
"-type",
"l",
")",
# Print all such files.
"-print",
],
stderr=subprocess.DEVNULL,
)
.decode("utf-8")
.strip()
)
return output.split("\n") if output else []
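# Illustrative invocation (editor's sketch; the root and extensions are
# hypothetical): find_paths_with_extensions("/repo", ["py", "pyi"]) executes
# roughly
#   find /repo ( -name '*.py' -or -name '*.pyi' ) ( -type f -or -type l ) -print
# and returns the matching absolute paths from the command's output.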
def find_python_paths(root: str) -> List[str]:
try:
return find_paths_with_extensions(root, ["py", "pyi"])
except subprocess.CalledProcessError:
raise EnvironmentException(
"Pyre was unable to locate an analysis directory. "
"Ensure that your project is built and re-run pyre."
)
def is_empty(path: str) -> bool:
try:
return os.stat(path).st_size == 0
except FileNotFoundError:
return False
def remove_if_exists(path: str) -> None:
try:
os.remove(path)
except OSError:
pass # Not a file.
try:
shutil.rmtree(path)
except OSError:
pass # Not a directory.
def _compute_symbolic_link_mapping(
directory: str, extensions: Iterable[str]
) -> Dict[str, str]:
"""
Given a shared analysis directory, produce a mapping from actual source files
to files contained within this directory. Only includes files which have
one of the provided extensions.
Watchman watches actual source files, so when a change is detected to a
file, this mapping can be used to identify what file changed from Pyre's
perspective.
"""
symbolic_links = {}
try:
for symbolic_link in find_paths_with_extensions(directory, extensions):
symbolic_links[os.path.realpath(symbolic_link)] = symbolic_link
except subprocess.CalledProcessError as error:
LOG.warning(
"Exception encountered trying to find source files "
"in the analysis directory: `%s`",
error,
)
LOG.warning("Starting with an empty set of tracked files.")
return symbolic_links
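# Illustrative mapping (editor's sketch with hypothetical paths): if the shared
# analysis directory contains the symlink /analysis/a/b.py -> /repo/a/b.py, the
# function above returns {"/repo/a/b.py": "/analysis/a/b.py"}, letting a
# Watchman event on the real source file be translated back to the tracked link.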
def _delete_symbolic_link(link_path: str) -> None:
os.unlink(link_path)
def add_symbolic_link(link_path: str, actual_path: str) -> None:
directory = os.path.dirname(link_path)
try:
os.makedirs(directory)
except OSError:
pass
try:
os.symlink(actual_path, link_path)
except OSError as error:
if error.errno == errno.EEXIST:
os.unlink(link_path)
os.symlink(actual_path, link_path)
else:
LOG.error(str(error))
def _lock_command(blocking: bool, is_shared_reader: bool) -> int:
lock_command = fcntl.LOCK_SH if is_shared_reader else fcntl.LOCK_EX
return lock_command if blocking else lock_command | fcntl.LOCK_NB
@contextmanager
def acquire_lock(
path: str, blocking: bool, is_shared_reader: bool = False
) -> Generator[Optional[int], None, None]:
"""Raise an OSError if `blocking` is False and the lock can't be acquired.
If `is_shared_reader=True`, then other processes can acquire the same
lock with `is_shared_reader=True`, but not with `is_shared_reader=False`.
Conversely, if `is_shared_reader=False`, then no other process can
acquire the lock until it is released."""
LOG.debug(
"Trying to acquire %slock on file %s",
"shared reader " if is_shared_reader else "",
path,
)
try:
with open(path, "w+") as lockfile:
try:
fcntl.lockf(
lockfile.fileno(), _lock_command(blocking, is_shared_reader)
)
yield lockfile.fileno()
finally:
fcntl.lockf(lockfile.fileno(), fcntl.LOCK_UN)
except FileNotFoundError:
LOG.debug(f"Unable to acquire lock because lock file {path} was not found")
yield
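# Illustrative usage (editor's sketch; the lock path is hypothetical):
#
#   try:
#       with acquire_lock("/tmp/pyre.lock", blocking=False):
#           ...  # exclusive critical section
#   except OSError:
#       ...  # another process already holds the lock
#
# With is_shared_reader=True any number of readers may hold the lock at once,
# while a caller requesting the default exclusive mode is still excluded.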
@contextmanager
def do_nothing() -> Generator[None, None, None]:
yield
def acquire_lock_if_needed(
lock_path: str, blocking: bool, needed: bool
) -> ContextManager[Optional[int]]:
if needed:
return acquire_lock(lock_path, blocking)
else:
return do_nothing()
class Filesystem:
def list(
self, root: str, patterns: List[str], exclude: Optional[List[str]] = None
) -> List[str]:
"""
Return the list of files that match any of the patterns within root.
If exclude is provided, files that match an exclude pattern are omitted.
Note: The `find` command does not understand globs properly.
e.g. 'a/*.py' will match 'a/b/c.py'
For this reason, avoid calling this method with glob patterns.
"""
command = ["find", "."]
command += self._match_any(patterns)
if exclude:
command += ["-and", "!"]
command += self._match_any(exclude)
return (
subprocess.run(command, stdout=subprocess.PIPE, cwd=root)
.stdout.decode("utf-8")
.split()
)
def _match_any(self, patterns: List[str]) -> List[str]:
expression = []
for pattern in patterns:
if expression:
expression.append("-or")
expression.extend(["-path", "./{}".format(pattern)])
return ["(", *expression, ")"]
class MercurialBackedFilesystem(Filesystem):
def list(
self, root: str, patterns: List[str], exclude: Optional[List[str]] = None
) -> List[str]:
try:
command = ["hg", "files"]
for pattern in patterns:
command += ["--include", pattern]
if exclude:
for pattern in exclude:
command += ["--exclude", pattern]
return (
subprocess.run(
command, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, cwd=root
)
.stdout.decode("utf-8")
.split()
)
except FileNotFoundError:
raise EnvironmentException("hg executable not found.")
@functools.lru_cache(1)
def get_filesystem() -> Filesystem:
try:
subprocess.check_output(["hg", "status"], stderr=subprocess.DEVNULL)
return MercurialBackedFilesystem()
except (subprocess.CalledProcessError, FileNotFoundError):
return Filesystem()
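# Illustrative usage (editor's sketch; root and patterns are hypothetical):
# get_filesystem() probes `hg status` once and memoises the result, so
#   get_filesystem().list("/repo", ["client/filesystem.py"])
# transparently uses `hg files` inside a Mercurial checkout and plain `find`
# elsewhere; per the docstring above, glob patterns are best avoided with the
# find-backed implementation.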
| [
"[email protected]"
] | |
a0b81a218a73b11bc6ba7b85118f466015bc7b86 | 5db0fab37c2b8a618d85d3b60fab9f806c416474 | /src/python/pants/backend/python/util_rules/ancestor_files_test.py | fad18565bb0bc12f8bb70ddb75d2d389ba6ec0f4 | [
"Apache-2.0"
] | permissive | pantsbuild/pants | 4988d1ac5474ec95f94ce2218aeb759401e4b011 | 98cbda8545f0d58c586ed2daa76fefd729d5e0d5 | refs/heads/main | 2023-09-05T03:44:17.646899 | 2023-09-01T19:52:09 | 2023-09-01T19:52:09 | 7,209,075 | 2,708 | 593 | Apache-2.0 | 2023-09-14T19:33:33 | 2012-12-17T17:39:04 | Python | UTF-8 | Python | false | false | 3,232 | py | # Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import pytest
from pants.backend.python.util_rules import ancestor_files
from pants.backend.python.util_rules.ancestor_files import (
AncestorFiles,
AncestorFilesRequest,
putative_ancestor_files,
)
from pants.testutil.rule_runner import QueryRule, RuleRunner
@pytest.fixture
def rule_runner() -> RuleRunner:
return RuleRunner(
rules=[
*ancestor_files.rules(),
QueryRule(AncestorFiles, (AncestorFilesRequest,)),
]
)
def assert_injected(
rule_runner: RuleRunner,
*,
input_files: list[str],
empty_files: list[str],
nonempty_files: list[str],
expected_discovered: list[str],
ignore_empty_files: bool,
) -> None:
rule_runner.write_files({**{f: "" for f in empty_files}, **{f: "foo" for f in nonempty_files}})
request = AncestorFilesRequest(
requested=("__init__.py",),
input_files=tuple(input_files),
ignore_empty_files=ignore_empty_files,
)
result = rule_runner.request(AncestorFiles, [request]).snapshot
assert list(result.files) == sorted(expected_discovered)
@pytest.mark.parametrize("ignore_empty_files", [False, True])
def test_rule(rule_runner: RuleRunner, ignore_empty_files: bool) -> None:
assert_injected(
rule_runner,
input_files=[
"src/python/project/lib.py",
"src/python/project/subdir/__init__.py",
"src/python/project/subdir/lib.py",
"src/python/no_init/lib.py",
],
nonempty_files=[
"src/python/__init__.py",
"tests/python/project/__init__.py",
],
empty_files=["src/python/project/__init__.py"],
ignore_empty_files=ignore_empty_files,
expected_discovered=(
["src/python/__init__.py"]
+ ([] if ignore_empty_files else ["src/python/project/__init__.py"])
),
)
def test_identify_missing_ancestor_files() -> None:
assert {
"__init__.py",
"a/__init__.py",
"a/b/__init__.py",
"a/b/c/d/__init__.py",
} == putative_ancestor_files(
requested=("__init__.py",),
input_files=("a/b/foo.py", "a/b/c/__init__.py", "a/b/c/d/bar.py", "a/e/__init__.py"),
)
assert {
"__init__.py",
"src/__init__.py",
"src/python/__init__.py",
"src/python/a/__init__.py",
"src/python/a/b/__init__.py",
"src/python/a/b/c/d/__init__.py",
} == putative_ancestor_files(
requested=("__init__.py",),
input_files=(
"src/python/a/b/foo.py",
"src/python/a/b/c/__init__.py",
"src/python/a/b/c/d/bar.py",
"src/python/a/e/__init__.py",
),
)
assert putative_ancestor_files(requested=("f.py", "f.pyi"), input_files=("subdir/foo.py",)) == {
"f.py",
"f.pyi",
"subdir/f.py",
"subdir/f.pyi",
}
assert putative_ancestor_files(
requested=("f.py", "f.pyi"), input_files=("subdir/foo.pyi",)
) == {"f.py", "f.pyi", "subdir/f.py", "subdir/f.pyi"}
| [
"[email protected]"
] | |
53bfabebe006a235d28336b4fc86a262baa2081b | 4e04db11d891f869a51adf0e0895999d425f29f6 | /portalbackend/lendapi/reporting/migrations/0002_auto_20170824_1910.py | 0de31b6204ded3007b1ffb8f42d330f6c538b71c | [] | no_license | mthangaraj/ix-ec-backend | 21e2d4b642c1174b53a86cd1a15564f99985d23f | 11b80dbd665e3592ed862403dd8c8d65b6791b30 | refs/heads/master | 2022-12-12T12:21:29.237675 | 2018-06-20T13:10:21 | 2018-06-20T13:10:21 | 138,033,811 | 0 | 0 | null | 2022-06-27T16:54:14 | 2018-06-20T13:04:22 | JavaScript | UTF-8 | Python | false | false | 859 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 19:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('reporting', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='financialstatemententry',
name='item_category',
),
migrations.RemoveField(
model_name='financialstatemententry',
name='item_name',
),
migrations.AddField(
model_name='financialstatemententry',
name='statement_type',
field=models.CharField(choices=[('Income Statement', 'Income Statement'), ('Balance Sheet', 'Balance Sheet'), ('Cash Flow', 'Cash Flow')], default='Income Statement', max_length=60),
),
]
| [
"[email protected]"
] | |
160d892ebcaa4753819cb92404d0a04333cf103a | d6af6c213b5be4a46f384a6667d8dbdd00289e37 | /nfselib/ginfes/v3_01/servico_consultar_nfse_envio_v03.py | bb001c2fb0dc9352fd0c9baffd9ab74a26424a7c | [
"MIT"
] | permissive | erpbrasil/nfselib | ea940d5b130709bc1b19f0cdb6a3fbb0aebac6b3 | dc90cf4b6f2fc9db52bbe9485fb0901b56d3aa71 | refs/heads/generated | 2021-01-02T21:35:02.240826 | 2020-07-21T13:48:20 | 2020-07-21T13:48:20 | 239,811,475 | 0 | 1 | MIT | 2020-12-26T22:56:36 | 2020-02-11T16:35:57 | Python | UTF-8 | Python | false | false | 388,953 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Thu Dec 13 15:47:45 2018 by generateDS.py version 2.29.2.
# Python 3.7.1 (default, Oct 22 2018, 10:41:28) [GCC 8.2.1 20180831]
#
# Command line options:
# ('--no-namespace-defs', '')
# ('-o', 'nfselib/v3_01/servico_consultar_nfse_envio_v03.py')
#
# Command line arguments:
# schemas/v3_01/servico_consultar_nfse_envio_v03.xsd
#
# Command line:
# /usr/bin/generateDS --no-namespace-defs -o "nfselib/v3_01/servico_consultar_nfse_envio_v03.py" schemas/v3_01/servico_consultar_nfse_envio_v03.xsd
#
# Current working directory (os.getcwd()):
# nfse
#
from __future__ import unicode_literals
import sys
import re as re_
import base64
import datetime as datetime_
import warnings as warnings_
from builtins import str
try:
from lxml import etree as etree_
except ImportError:
from xml.etree import ElementTree as etree_
Validate_simpletypes_ = True
if sys.version_info.major == 2:
BaseStrType_ = basestring
else:
BaseStrType_ = str
def parsexml_(infile, parser=None, **kwargs):
if parser is None:
# Use the lxml ElementTree compatible parser so that, e.g.,
# we ignore comments.
try:
parser = etree_.ETCompatXMLParser()
except AttributeError:
# fallback to xml.etree
parser = etree_.XMLParser()
doc = etree_.parse(infile, parser=parser, **kwargs)
return doc
#
# Namespace prefix definition table (and other attributes, too)
#
# The module generatedsnamespaces, if it is importable, must contain
# a dictionary named GeneratedsNamespaceDefs. This Python dictionary
# should map element type names (strings) to XML schema namespace prefix
# definitions. The export method for any class for which there is
# a namespace prefix definition, will export that definition in the
# XML representation of that element. See the export method of
# any generated element type class for a example of the use of this
# table.
# A sample table is:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceDefs = {
# "ElementtypeA": "http://www.xxx.com/namespaceA",
# "ElementtypeB": "http://www.xxx.com/namespaceB",
# }
#
try:
from nfselib.ginfes.v3_01.generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
except ImportError:
GenerateDSNamespaceDefs_ = {}
#
# The root super-class for element type classes
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
from generatedssuper import GeneratedsSuper
except ImportError as exp:
class GeneratedsSuper(object):
tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
class _FixedOffsetTZ(datetime_.tzinfo):
def __init__(self, offset, name):
self.__offset = datetime_.timedelta(minutes=offset)
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return None
def gds_format_string(self, input_data, input_name=''):
return input_data
def gds_validate_string(self, input_data, node=None, input_name=''):
if not input_data:
return ''
else:
return input_data
def gds_format_base64(self, input_data, input_name=''):
return base64.b64encode(input_data)
def gds_validate_base64(self, input_data, node=None, input_name=''):
return input_data
def gds_format_integer(self, input_data, input_name=''):
return '%d' % input_data
def gds_validate_integer(self, input_data, node=None, input_name=''):
return input_data
def gds_format_integer_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_integer_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
int(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of integers')
return values
def gds_format_float(self, input_data, input_name=''):
return ('%.15f' % input_data).rstrip('0')
def gds_validate_float(self, input_data, node=None, input_name=''):
return input_data
def gds_format_float_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_float_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of floats')
return values
def gds_format_double(self, input_data, input_name=''):
return '%e' % input_data
def gds_validate_double(self, input_data, node=None, input_name=''):
return input_data
def gds_format_double_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_double_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of doubles')
return values
def gds_format_boolean(self, input_data, input_name=''):
return ('%s' % input_data).lower()
def gds_validate_boolean(self, input_data, node=None, input_name=''):
return input_data
def gds_format_boolean_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_boolean_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
if value not in ('true', '1', 'false', '0', ):
raise_parse_error(
node,
'Requires sequence of booleans '
'("true", "1", "false", "0")')
return values
def gds_validate_datetime(self, input_data, node=None, input_name=''):
return input_data
def gds_format_datetime(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
@classmethod
def gds_parse_datetime(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
time_parts = input_data.split('.')
if len(time_parts) > 1:
micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
input_data = '%s.%s' % (time_parts[0], micro_seconds, )
dt = datetime_.datetime.strptime(
input_data, '%Y-%m-%dT%H:%M:%S.%f')
else:
dt = datetime_.datetime.strptime(
input_data, '%Y-%m-%dT%H:%M:%S')
dt = dt.replace(tzinfo=tz)
return dt
def gds_validate_date(self, input_data, node=None, input_name=''):
return input_data
def gds_format_date(self, input_data, input_name=''):
_svalue = '%04d-%02d-%02d' % (
input_data.year,
input_data.month,
input_data.day,
)
try:
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(
hours, minutes)
except AttributeError:
pass
return _svalue
@classmethod
def gds_parse_date(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
dt = dt.replace(tzinfo=tz)
return dt.date()
def gds_validate_time(self, input_data, node=None, input_name=''):
return input_data
def gds_format_time(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%02d:%02d:%02d' % (
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%02d:%02d:%02d.%s' % (
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
def gds_validate_simple_patterns(self, patterns, target):
# pat is a list of lists of strings/patterns. We should:
# - AND the outer elements
# - OR the inner elements
found1 = True
for patterns1 in patterns:
found2 = False
for patterns2 in patterns1:
if re_.search(patterns2, target) is not None:
found2 = True
break
if not found2:
found1 = False
break
return found1
@classmethod
def gds_parse_time(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
if len(input_data.split('.')) > 1:
dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
else:
dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
dt = dt.replace(tzinfo=tz)
return dt.time()
def gds_str_lower(self, instring):
return instring.lower()
def get_path_(self, node):
path_list = []
self.get_path_list_(node, path_list)
path_list.reverse()
path = '/'.join(path_list)
return path
Tag_strip_pattern_ = re_.compile(r'\{.*\}')
def get_path_list_(self, node, path_list):
if node is None:
return
tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
if tag:
path_list.append(tag)
self.get_path_list_(node.getparent(), path_list)
def get_class_obj_(self, node, default_class=None):
class_obj1 = default_class
if 'xsi' in node.nsmap:
classname = node.get('{%s}type' % node.nsmap['xsi'])
if classname is not None:
names = classname.split(':')
if len(names) == 2:
classname = names[1]
class_obj2 = globals().get(classname)
if class_obj2 is not None:
class_obj1 = class_obj2
return class_obj1
def gds_build_any(self, node, type_name=None):
return None
@classmethod
def gds_reverse_node_mapping(cls, mapping):
return dict(((v, k) for k, v in mapping.iteritems()))
@staticmethod
def gds_encode(instring):
if sys.version_info.major == 2 and not isinstance(instring, unicode):
return instring.encode(ExternalEncoding)
else:
return instring
@staticmethod
def convert_unicode(instring):
if isinstance(instring, str):
result = quote_xml(instring)
elif sys.version_info.major == 2 and isinstance(instring, unicode):
result = quote_xml(instring).encode('utf8')
else:
result = GeneratedsSuper.gds_encode(str(instring))
return result
def __eq__(self, other):
if type(self) != type(other):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self.__eq__(other)
def getSubclassFromModule_(module, class_):
'''Get the subclass of a class from a specific module.'''
name = class_.__name__ + 'Sub'
if hasattr(module, name):
return getattr(module, name)
else:
return None
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
ExternalEncoding = 'utf-8'
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
# Change this to redirect the generated superclass module to use a
# specific subclass module.
CurrentSubclassModule_ = None
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
if pretty_print:
for idx in range(level):
outfile.write(' ')
def quote_xml(inStr):
"Escape markup chars, but do not modify CDATA sections."
if not inStr:
return ''
s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
s2 = ''
pos = 0
matchobjects = CDATA_pattern_.finditer(s1)
for mo in matchobjects:
s3 = s1[pos:mo.start()]
s2 += quote_xml_aux(s3)
s2 += s1[mo.start():mo.end()]
pos = mo.end()
s3 = s1[pos:]
s2 += quote_xml_aux(s3)
return s2
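# Illustrative behaviour (editor's sketch): only text outside CDATA sections is
# escaped, e.g.
#   quote_xml('a<b & <![CDATA[<raw>]]>') == 'a&lt;b &amp; <![CDATA[<raw>]]>'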
def quote_xml_aux(inStr):
s1 = inStr.replace('&', '&')
s1 = s1.replace('<', '<')
s1 = s1.replace('>', '>')
return s1
def quote_attrib(inStr):
s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
s1 = s1.replace('&', '&')
s1 = s1.replace('<', '<')
s1 = s1.replace('>', '>')
if '"' in s1:
if "'" in s1:
s1 = '"%s"' % s1.replace('"', """)
else:
s1 = "'%s'" % s1
else:
s1 = '"%s"' % s1
return s1
def quote_python(inStr):
s1 = inStr
if s1.find("'") == -1:
if s1.find('\n') == -1:
return "'%s'" % s1
else:
return "'''%s'''" % s1
else:
if s1.find('"') != -1:
s1 = s1.replace('"', '\\"')
if s1.find('\n') == -1:
return '"%s"' % s1
else:
return '"""%s"""' % s1
def get_all_text_(node):
if node.text is not None:
text = node.text
else:
text = ''
for child in node:
if child.tail is not None:
text += child.tail
return text
def find_attr_value_(attr_name, node):
attrs = node.attrib
attr_parts = attr_name.split(':')
value = None
if len(attr_parts) == 1:
value = attrs.get(attr_name)
elif len(attr_parts) == 2:
prefix, name = attr_parts
namespace = node.nsmap.get(prefix)
if namespace is not None:
value = attrs.get('{%s}%s' % (namespace, name, ))
return value
class GDSParseError(Exception):
pass
def raise_parse_error(node, msg):
msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
raise GDSParseError(msg)
class MixedContainer:
# Constants for category:
CategoryNone = 0
CategoryText = 1
CategorySimple = 2
CategoryComplex = 3
# Constants for content_type:
TypeNone = 0
TypeText = 1
TypeString = 2
TypeInteger = 3
TypeFloat = 4
TypeDecimal = 5
TypeDouble = 6
TypeBoolean = 7
TypeBase64 = 8
def __init__(self, category, content_type, name, value):
self.category = category
self.content_type = content_type
self.name = name
self.value = value
def getCategory(self):
return self.category
def getContenttype(self, content_type):
return self.content_type
def getValue(self):
return self.value
def getName(self):
return self.name
def export(self, outfile, level, name, namespace,
pretty_print=True):
if self.category == MixedContainer.CategoryText:
# Prevent exporting empty content as empty lines.
if self.value.strip():
outfile.write(self.value)
elif self.category == MixedContainer.CategorySimple:
self.exportSimple(outfile, level, name)
else: # category == MixedContainer.CategoryComplex
self.value.export(
outfile, level, namespace, name,
pretty_print=pretty_print)
def exportSimple(self, outfile, level, name):
if self.content_type == MixedContainer.TypeString:
outfile.write('<%s>%s</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeInteger or \
self.content_type == MixedContainer.TypeBoolean:
outfile.write('<%s>%d</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeFloat or \
self.content_type == MixedContainer.TypeDecimal:
outfile.write('<%s>%f</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeDouble:
outfile.write('<%s>%g</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeBase64:
outfile.write('<%s>%s</%s>' % (
self.name,
base64.b64encode(self.value),
self.name))
def to_etree(self, element):
if self.category == MixedContainer.CategoryText:
# Prevent exporting empty content as empty lines.
if self.value.strip():
if len(element) > 0:
if element[-1].tail is None:
element[-1].tail = self.value
else:
element[-1].tail += self.value
else:
if element.text is None:
element.text = self.value
else:
element.text += self.value
elif self.category == MixedContainer.CategorySimple:
subelement = etree_.SubElement(
element, '%s' % self.name)
subelement.text = self.to_etree_simple()
else: # category == MixedContainer.CategoryComplex
self.value.to_etree(element)
def to_etree_simple(self):
if self.content_type == MixedContainer.TypeString:
text = self.value
elif (self.content_type == MixedContainer.TypeInteger or
self.content_type == MixedContainer.TypeBoolean):
text = '%d' % self.value
elif (self.content_type == MixedContainer.TypeFloat or
self.content_type == MixedContainer.TypeDecimal):
text = '%f' % self.value
elif self.content_type == MixedContainer.TypeDouble:
text = '%g' % self.value
elif self.content_type == MixedContainer.TypeBase64:
text = '%s' % base64.b64encode(self.value)
return text
def exportLiteral(self, outfile, level, name):
if self.category == MixedContainer.CategoryText:
showIndent(outfile, level)
outfile.write(
'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
self.category, self.content_type,
self.name, self.value))
elif self.category == MixedContainer.CategorySimple:
showIndent(outfile, level)
outfile.write(
'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
self.category, self.content_type,
self.name, self.value))
else: # category == MixedContainer.CategoryComplex
showIndent(outfile, level)
outfile.write(
'model_.MixedContainer(%d, %d, "%s",\n' % (
self.category, self.content_type, self.name,))
self.value.exportLiteral(outfile, level + 1)
showIndent(outfile, level)
outfile.write(')\n')
class MemberSpec_(object):
def __init__(self, name='', data_type='', container=0,
optional=0, child_attrs=None, choice=None,
documentation=""):
self.name = name
self.data_type = data_type
self.container = container
self.child_attrs = child_attrs
self.choice = choice
self.optional = optional
self.documentation = documentation
def set_name(self, name): self.name = name
def get_name(self): return self.name
def set_data_type(self, data_type): self.data_type = data_type
def get_data_type_chain(self): return self.data_type
def get_data_type(self):
if isinstance(self.data_type, list):
if len(self.data_type) > 0:
return self.data_type[-1]
else:
return 'xs:string'
else:
return self.data_type
def set_container(self, container): self.container = container
def get_container(self): return self.container
def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs
def get_child_attrs(self): return self.child_attrs
def set_choice(self, choice): self.choice = choice
def get_choice(self): return self.choice
def set_optional(self, optional): self.optional = optional
def get_optional(self): return self.optional
def get_documentation(self): return self.documentation
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class ConsultarNfseEnvio(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, Prestador=None, NumeroNfse=None, PeriodoEmissao=None, Tomador=None, IntermediarioServico=None, Signature=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.Prestador = Prestador
self.NumeroNfse = NumeroNfse
self.validate_tsNumeroNfse(self.NumeroNfse)
self.PeriodoEmissao = PeriodoEmissao
self.Tomador = Tomador
self.IntermediarioServico = IntermediarioServico
self.Signature = Signature
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, ConsultarNfseEnvio)
if subclass is not None:
return subclass(*args_, **kwargs_)
if ConsultarNfseEnvio.subclass:
return ConsultarNfseEnvio.subclass(*args_, **kwargs_)
else:
return ConsultarNfseEnvio(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Prestador(self): return self.Prestador
def set_Prestador(self, Prestador): self.Prestador = Prestador
def get_NumeroNfse(self): return self.NumeroNfse
def set_NumeroNfse(self, NumeroNfse): self.NumeroNfse = NumeroNfse
def get_PeriodoEmissao(self): return self.PeriodoEmissao
def set_PeriodoEmissao(self, PeriodoEmissao): self.PeriodoEmissao = PeriodoEmissao
def get_Tomador(self): return self.Tomador
def set_Tomador(self, Tomador): self.Tomador = Tomador
def get_IntermediarioServico(self): return self.IntermediarioServico
def set_IntermediarioServico(self, IntermediarioServico): self.IntermediarioServico = IntermediarioServico
def get_Signature(self): return self.Signature
def set_Signature(self, Signature): self.Signature = Signature
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def validate_tsNumeroNfse(self, value):
# Validate type tsNumeroNfse, a restriction on xsd:nonNegativeInteger.
if value is not None and Validate_simpletypes_:
            if len(str(value)) > 15:  # tsNumeroNfse admits values of up to 15 digits
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroNfse' % {"value" : value} )
def hasContent_(self):
if (
self.Prestador is not None or
self.NumeroNfse is not None or
self.PeriodoEmissao is not None or
self.Tomador is not None or
self.IntermediarioServico is not None or
self.Signature is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='ConsultarNfseEnvio', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('ConsultarNfseEnvio')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ConsultarNfseEnvio')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='ConsultarNfseEnvio', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ConsultarNfseEnvio'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='ConsultarNfseEnvio', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Prestador is not None:
self.Prestador.export(outfile, level, namespace_, name_='Prestador', pretty_print=pretty_print)
if self.NumeroNfse is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<NumeroNfse>%s</NumeroNfse>%s' % (self.gds_format_integer(self.NumeroNfse, input_name='NumeroNfse'), eol_))
if self.PeriodoEmissao is not None:
self.PeriodoEmissao.export(outfile, level, namespace_, name_='PeriodoEmissao', pretty_print=pretty_print)
if self.Tomador is not None:
self.Tomador.export(outfile, level, namespace_, name_='Tomador', pretty_print=pretty_print)
if self.IntermediarioServico is not None:
self.IntermediarioServico.export(outfile, level, namespace_, name_='IntermediarioServico', pretty_print=pretty_print)
if self.Signature is not None:
self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Prestador':
obj_ = tcIdentificacaoPrestador.factory()
obj_.build(child_)
self.Prestador = obj_
obj_.original_tagname_ = 'Prestador'
elif nodeName_ == 'NumeroNfse':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
if ival_ < 0:
raise_parse_error(child_, 'requires nonNegativeInteger')
ival_ = self.gds_validate_integer(ival_, node, 'NumeroNfse')
self.NumeroNfse = ival_
# validate type tsNumeroNfse
self.validate_tsNumeroNfse(self.NumeroNfse)
elif nodeName_ == 'PeriodoEmissao':
obj_ = PeriodoEmissaoType.factory()
obj_.build(child_)
self.PeriodoEmissao = obj_
obj_.original_tagname_ = 'PeriodoEmissao'
elif nodeName_ == 'Tomador':
obj_ = tcIdentificacaoTomador.factory()
obj_.build(child_)
self.Tomador = obj_
obj_.original_tagname_ = 'Tomador'
elif nodeName_ == 'IntermediarioServico':
obj_ = tcIdentificacaoIntermediarioServico.factory()
obj_.build(child_)
self.IntermediarioServico = obj_
obj_.original_tagname_ = 'IntermediarioServico'
elif nodeName_ == 'Signature':
obj_ = SignatureType.factory()
obj_.build(child_)
self.Signature = obj_
obj_.original_tagname_ = 'Signature'
# end class ConsultarNfseEnvio
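# Illustrative usage (editor's sketch; all field values are hypothetical, and
# tcIdentificacaoPrestador is defined further down in this module):
#   envio = ConsultarNfseEnvio(
#       Prestador=tcIdentificacaoPrestador(Cnpj='12345678000195'),
#       NumeroNfse=123,
#   )
#   envio.export(sys.stdout, 0)
# which serialises the query to XML through the export machinery above.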
class tcCpfCnpj(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Cpf=None, Cnpj=None):
self.original_tagname_ = None
self.Cpf = Cpf
self.validate_tsCpf(self.Cpf)
self.Cnpj = Cnpj
self.validate_tsCnpj(self.Cnpj)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcCpfCnpj)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcCpfCnpj.subclass:
return tcCpfCnpj.subclass(*args_, **kwargs_)
else:
return tcCpfCnpj(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Cpf(self): return self.Cpf
def set_Cpf(self, Cpf): self.Cpf = Cpf
def get_Cnpj(self): return self.Cnpj
def set_Cnpj(self, Cnpj): self.Cnpj = Cnpj
def validate_tsCpf(self, value):
# Validate type tsCpf, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) != 11:
warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCpf' % {"value" : value.encode("utf-8")} )
def validate_tsCnpj(self, value):
# Validate type tsCnpj, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) != 14:
warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCnpj' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.Cpf is not None or
self.Cnpj is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcCpfCnpj', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcCpfCnpj')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcCpfCnpj')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcCpfCnpj', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcCpfCnpj'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcCpfCnpj', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Cpf is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Cpf>%s</Cpf>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cpf), input_name='Cpf')), eol_))
if self.Cnpj is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Cnpj>%s</Cnpj>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cnpj), input_name='Cnpj')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Cpf':
Cpf_ = child_.text
Cpf_ = self.gds_validate_string(Cpf_, node, 'Cpf')
self.Cpf = Cpf_
# validate type tsCpf
self.validate_tsCpf(self.Cpf)
elif nodeName_ == 'Cnpj':
Cnpj_ = child_.text
Cnpj_ = self.gds_validate_string(Cnpj_, node, 'Cnpj')
self.Cnpj = Cnpj_
# validate type tsCnpj
self.validate_tsCnpj(self.Cnpj)
# end class tcCpfCnpj
class tcEndereco(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Endereco=None, Numero=None, Complemento=None, Bairro=None, CodigoMunicipio=None, Uf=None, Cep=None):
self.original_tagname_ = None
self.Endereco = Endereco
self.validate_tsEndereco(self.Endereco)
self.Numero = Numero
self.validate_tsNumeroEndereco(self.Numero)
self.Complemento = Complemento
self.validate_tsComplementoEndereco(self.Complemento)
self.Bairro = Bairro
self.validate_tsBairro(self.Bairro)
self.CodigoMunicipio = CodigoMunicipio
self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
self.Uf = Uf
self.validate_tsUf(self.Uf)
self.Cep = Cep
self.validate_tsCep(self.Cep)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcEndereco)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcEndereco.subclass:
return tcEndereco.subclass(*args_, **kwargs_)
else:
return tcEndereco(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Endereco(self): return self.Endereco
def set_Endereco(self, Endereco): self.Endereco = Endereco
def get_Numero(self): return self.Numero
def set_Numero(self, Numero): self.Numero = Numero
def get_Complemento(self): return self.Complemento
def set_Complemento(self, Complemento): self.Complemento = Complemento
def get_Bairro(self): return self.Bairro
def set_Bairro(self, Bairro): self.Bairro = Bairro
def get_CodigoMunicipio(self): return self.CodigoMunicipio
def set_CodigoMunicipio(self, CodigoMunicipio): self.CodigoMunicipio = CodigoMunicipio
def get_Uf(self): return self.Uf
def set_Uf(self, Uf): self.Uf = Uf
def get_Cep(self): return self.Cep
def set_Cep(self, Cep): self.Cep = Cep
def validate_tsEndereco(self, value):
# Validate type tsEndereco, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 125:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsEndereco' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsEndereco' % {"value" : value.encode("utf-8")} )
def validate_tsNumeroEndereco(self, value):
# Validate type tsNumeroEndereco, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 10:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsNumeroEndereco' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsNumeroEndereco' % {"value" : value.encode("utf-8")} )
def validate_tsComplementoEndereco(self, value):
# Validate type tsComplementoEndereco, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 60:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsComplementoEndereco' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsComplementoEndereco' % {"value" : value.encode("utf-8")} )
def validate_tsBairro(self, value):
# Validate type tsBairro, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 60:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsBairro' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsBairro' % {"value" : value.encode("utf-8")} )
def validate_tsCodigoMunicipioIbge(self, value):
# Validate type tsCodigoMunicipioIbge, a restriction on xsd:int.
if value is not None and Validate_simpletypes_:
            if len(str(value)) > 7:  # IBGE municipality codes may use all 7 digits
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoMunicipioIbge' % {"value" : value} )
def validate_tsUf(self, value):
# Validate type tsUf, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) != 2:
warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsUf' % {"value" : value.encode("utf-8")} )
def validate_tsCep(self, value):
# Validate type tsCep, a restriction on xsd:int.
if value is not None and Validate_simpletypes_:
            if len(str(value)) > 8:  # CEP values may use all 8 digits
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCep' % {"value" : value} )
def hasContent_(self):
if (
self.Endereco is not None or
self.Numero is not None or
self.Complemento is not None or
self.Bairro is not None or
self.CodigoMunicipio is not None or
self.Uf is not None or
self.Cep is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcEndereco', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcEndereco')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcEndereco')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcEndereco', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcEndereco'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcEndereco', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Endereco is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Endereco>%s</Endereco>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Endereco), input_name='Endereco')), eol_))
if self.Numero is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Numero>%s</Numero>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Numero), input_name='Numero')), eol_))
if self.Complemento is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Complemento>%s</Complemento>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Complemento), input_name='Complemento')), eol_))
if self.Bairro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Bairro>%s</Bairro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Bairro), input_name='Bairro')), eol_))
if self.CodigoMunicipio is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<CodigoMunicipio>%s</CodigoMunicipio>%s' % (self.gds_format_integer(self.CodigoMunicipio, input_name='CodigoMunicipio'), eol_))
if self.Uf is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Uf>%s</Uf>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Uf), input_name='Uf')), eol_))
if self.Cep is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Cep>%s</Cep>%s' % (self.gds_format_integer(self.Cep, input_name='Cep'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Endereco':
Endereco_ = child_.text
Endereco_ = self.gds_validate_string(Endereco_, node, 'Endereco')
self.Endereco = Endereco_
# validate type tsEndereco
self.validate_tsEndereco(self.Endereco)
elif nodeName_ == 'Numero':
Numero_ = child_.text
Numero_ = self.gds_validate_string(Numero_, node, 'Numero')
self.Numero = Numero_
# validate type tsNumeroEndereco
self.validate_tsNumeroEndereco(self.Numero)
elif nodeName_ == 'Complemento':
Complemento_ = child_.text
Complemento_ = self.gds_validate_string(Complemento_, node, 'Complemento')
self.Complemento = Complemento_
# validate type tsComplementoEndereco
self.validate_tsComplementoEndereco(self.Complemento)
elif nodeName_ == 'Bairro':
Bairro_ = child_.text
Bairro_ = self.gds_validate_string(Bairro_, node, 'Bairro')
self.Bairro = Bairro_
# validate type tsBairro
self.validate_tsBairro(self.Bairro)
elif nodeName_ == 'CodigoMunicipio':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'CodigoMunicipio')
self.CodigoMunicipio = ival_
# validate type tsCodigoMunicipioIbge
self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
elif nodeName_ == 'Uf':
Uf_ = child_.text
Uf_ = self.gds_validate_string(Uf_, node, 'Uf')
self.Uf = Uf_
# validate type tsUf
self.validate_tsUf(self.Uf)
elif nodeName_ == 'Cep':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'Cep')
self.Cep = ival_
# validate type tsCep
self.validate_tsCep(self.Cep)
# end class tcEndereco
class tcContato(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Telefone=None, Email=None):
self.original_tagname_ = None
self.Telefone = Telefone
self.validate_tsTelefone(self.Telefone)
self.Email = Email
self.validate_tsEmail(self.Email)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcContato)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcContato.subclass:
return tcContato.subclass(*args_, **kwargs_)
else:
return tcContato(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Telefone(self): return self.Telefone
def set_Telefone(self, Telefone): self.Telefone = Telefone
def get_Email(self): return self.Email
def set_Email(self, Email): self.Email = Email
def validate_tsTelefone(self, value):
# Validate type tsTelefone, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 11:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsTelefone' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsTelefone' % {"value" : value.encode("utf-8")} )
def validate_tsEmail(self, value):
# Validate type tsEmail, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 80:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsEmail' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsEmail' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.Telefone is not None or
self.Email is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcContato', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcContato')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcContato')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcContato', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcContato'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcContato', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Telefone is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Telefone>%s</Telefone>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Telefone), input_name='Telefone')), eol_))
if self.Email is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Email>%s</Email>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Email), input_name='Email')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Telefone':
Telefone_ = child_.text
Telefone_ = self.gds_validate_string(Telefone_, node, 'Telefone')
self.Telefone = Telefone_
# validate type tsTelefone
self.validate_tsTelefone(self.Telefone)
elif nodeName_ == 'Email':
Email_ = child_.text
Email_ = self.gds_validate_string(Email_, node, 'Email')
self.Email = Email_
# validate type tsEmail
self.validate_tsEmail(self.Email)
# end class tcContato
class tcIdentificacaoOrgaoGerador(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, CodigoMunicipio=None, Uf=None):
self.original_tagname_ = None
self.CodigoMunicipio = CodigoMunicipio
self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
self.Uf = Uf
self.validate_tsUf(self.Uf)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcIdentificacaoOrgaoGerador)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcIdentificacaoOrgaoGerador.subclass:
return tcIdentificacaoOrgaoGerador.subclass(*args_, **kwargs_)
else:
return tcIdentificacaoOrgaoGerador(*args_, **kwargs_)
factory = staticmethod(factory)
def get_CodigoMunicipio(self): return self.CodigoMunicipio
def set_CodigoMunicipio(self, CodigoMunicipio): self.CodigoMunicipio = CodigoMunicipio
def get_Uf(self): return self.Uf
def set_Uf(self, Uf): self.Uf = Uf
def validate_tsCodigoMunicipioIbge(self, value):
# Validate type tsCodigoMunicipioIbge, a restriction on xsd:int.
if value is not None and Validate_simpletypes_:
            if len(str(value)) > 7:  # IBGE municipality codes may use all 7 digits
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoMunicipioIbge' % {"value" : value} )
def validate_tsUf(self, value):
# Validate type tsUf, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) != 2:
warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsUf' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.CodigoMunicipio is not None or
self.Uf is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcIdentificacaoOrgaoGerador', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoOrgaoGerador')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoOrgaoGerador')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoOrgaoGerador', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoOrgaoGerador'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoOrgaoGerador', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.CodigoMunicipio is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<CodigoMunicipio>%s</CodigoMunicipio>%s' % (self.gds_format_integer(self.CodigoMunicipio, input_name='CodigoMunicipio'), eol_))
if self.Uf is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Uf>%s</Uf>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Uf), input_name='Uf')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'CodigoMunicipio':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'CodigoMunicipio')
self.CodigoMunicipio = ival_
# validate type tsCodigoMunicipioIbge
self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
elif nodeName_ == 'Uf':
Uf_ = child_.text
Uf_ = self.gds_validate_string(Uf_, node, 'Uf')
self.Uf = Uf_
# validate type tsUf
self.validate_tsUf(self.Uf)
# end class tcIdentificacaoOrgaoGerador
class tcIdentificacaoRps(GeneratedsSuper):
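    """Identification of an RPS (Recibo Provisorio de Servicos, the
    provisional receipt later converted into an NFSe): number, series
    and type."""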
subclass = None
superclass = None
def __init__(self, Numero=None, Serie=None, Tipo=None):
self.original_tagname_ = None
self.Numero = Numero
self.validate_tsNumeroRps(self.Numero)
self.Serie = Serie
self.validate_tsSerieRps(self.Serie)
self.Tipo = Tipo
self.validate_tsTipoRps(self.Tipo)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcIdentificacaoRps)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcIdentificacaoRps.subclass:
return tcIdentificacaoRps.subclass(*args_, **kwargs_)
else:
return tcIdentificacaoRps(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Numero(self): return self.Numero
def set_Numero(self, Numero): self.Numero = Numero
def get_Serie(self): return self.Serie
def set_Serie(self, Serie): self.Serie = Serie
def get_Tipo(self): return self.Tipo
def set_Tipo(self, Tipo): self.Tipo = Tipo
def validate_tsNumeroRps(self, value):
# Validate type tsNumeroRps, a restriction on xsd:nonNegativeInteger.
if value is not None and Validate_simpletypes_:
            # RPS numbers have at most 15 digits, so warn only above that
            # length.
            if len(str(value)) > 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroRps' % {"value" : value} )
def validate_tsSerieRps(self, value):
# Validate type tsSerieRps, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 5:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsSerieRps' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsSerieRps' % {"value" : value.encode("utf-8")} )
def validate_tsTipoRps(self, value):
# Validate type tsTipoRps, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
            # Tipo is stored as an integer; the regex patterns (and the
            # warning message) need its string form, so coerce with str().
            if not self.gds_validate_simple_patterns(
                self.validate_tsTipoRps_patterns_, str(value)):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tsTipoRps_patterns_, ))
validate_tsTipoRps_patterns_ = [['^1$|^2$|^3$']]
def hasContent_(self):
if (
self.Numero is not None or
self.Serie is not None or
self.Tipo is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcIdentificacaoRps', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoRps')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoRps')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoRps', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoRps'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoRps', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Numero is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Numero>%s</Numero>%s' % (self.gds_format_integer(self.Numero, input_name='Numero'), eol_))
if self.Serie is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Serie>%s</Serie>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Serie), input_name='Serie')), eol_))
if self.Tipo is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Tipo>%s</Tipo>%s' % (self.gds_format_integer(self.Tipo, input_name='Tipo'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Numero':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
if ival_ < 0:
raise_parse_error(child_, 'requires nonNegativeInteger')
ival_ = self.gds_validate_integer(ival_, node, 'Numero')
self.Numero = ival_
# validate type tsNumeroRps
self.validate_tsNumeroRps(self.Numero)
elif nodeName_ == 'Serie':
Serie_ = child_.text
Serie_ = self.gds_validate_string(Serie_, node, 'Serie')
self.Serie = Serie_
# validate type tsSerieRps
self.validate_tsSerieRps(self.Serie)
elif nodeName_ == 'Tipo':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'Tipo')
self.Tipo = ival_
# validate type tsTipoRps
self.validate_tsTipoRps(self.Tipo)
# end class tcIdentificacaoRps
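# Usage sketch for the parse direction, kept as comments. lxml is what
# the generateDS runtime at the top of this module conventionally uses;
# the XML fragment below is illustrative.
#
#     from lxml import etree
#     frag = b'<IdentificacaoRps><Numero>42</Numero>' \
#            b'<Serie>A1</Serie><Tipo>1</Tipo></IdentificacaoRps>'
#     rps = tcIdentificacaoRps.factory()
#     rps.build(etree.fromstring(frag))
#     assert rps.get_Numero() == 42 and rps.get_Tipo() == 1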
class tcIdentificacaoPrestador(GeneratedsSuper):
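    """Identification of the service provider: 14-digit CNPJ and
    municipal registration (InscricaoMunicipal)."""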
subclass = None
superclass = None
def __init__(self, Cnpj=None, InscricaoMunicipal=None):
self.original_tagname_ = None
self.Cnpj = Cnpj
self.validate_tsCnpj(self.Cnpj)
self.InscricaoMunicipal = InscricaoMunicipal
self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcIdentificacaoPrestador)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcIdentificacaoPrestador.subclass:
return tcIdentificacaoPrestador.subclass(*args_, **kwargs_)
else:
return tcIdentificacaoPrestador(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Cnpj(self): return self.Cnpj
def set_Cnpj(self, Cnpj): self.Cnpj = Cnpj
def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
def validate_tsCnpj(self, value):
# Validate type tsCnpj, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) != 14:
warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCnpj' % {"value" : value.encode("utf-8")} )
def validate_tsInscricaoMunicipal(self, value):
# Validate type tsInscricaoMunicipal, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 15:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.Cnpj is not None or
self.InscricaoMunicipal is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcIdentificacaoPrestador', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoPrestador')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoPrestador')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoPrestador', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoPrestador'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoPrestador', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Cnpj is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Cnpj>%s</Cnpj>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cnpj), input_name='Cnpj')), eol_))
if self.InscricaoMunicipal is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Cnpj':
Cnpj_ = child_.text
Cnpj_ = self.gds_validate_string(Cnpj_, node, 'Cnpj')
self.Cnpj = Cnpj_
# validate type tsCnpj
self.validate_tsCnpj(self.Cnpj)
elif nodeName_ == 'InscricaoMunicipal':
InscricaoMunicipal_ = child_.text
InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
self.InscricaoMunicipal = InscricaoMunicipal_
# validate type tsInscricaoMunicipal
self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
# end class tcIdentificacaoPrestador
class tcIdentificacaoTomador(GeneratedsSuper):
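    """Identification of the service taker (customer): CPF or CNPJ
    (via tcCpfCnpj) and optional municipal registration."""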
subclass = None
superclass = None
def __init__(self, CpfCnpj=None, InscricaoMunicipal=None):
self.original_tagname_ = None
self.CpfCnpj = CpfCnpj
self.InscricaoMunicipal = InscricaoMunicipal
self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcIdentificacaoTomador)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcIdentificacaoTomador.subclass:
return tcIdentificacaoTomador.subclass(*args_, **kwargs_)
else:
return tcIdentificacaoTomador(*args_, **kwargs_)
factory = staticmethod(factory)
def get_CpfCnpj(self): return self.CpfCnpj
def set_CpfCnpj(self, CpfCnpj): self.CpfCnpj = CpfCnpj
def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
def validate_tsInscricaoMunicipal(self, value):
# Validate type tsInscricaoMunicipal, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 15:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.CpfCnpj is not None or
self.InscricaoMunicipal is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcIdentificacaoTomador', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoTomador')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoTomador')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoTomador', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoTomador'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoTomador', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.CpfCnpj is not None:
self.CpfCnpj.export(outfile, level, namespace_, name_='CpfCnpj', pretty_print=pretty_print)
if self.InscricaoMunicipal is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'CpfCnpj':
obj_ = tcCpfCnpj.factory()
obj_.build(child_)
self.CpfCnpj = obj_
obj_.original_tagname_ = 'CpfCnpj'
elif nodeName_ == 'InscricaoMunicipal':
InscricaoMunicipal_ = child_.text
InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
self.InscricaoMunicipal = InscricaoMunicipal_
# validate type tsInscricaoMunicipal
self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
# end class tcIdentificacaoTomador
class tcDadosTomador(GeneratedsSuper):
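    """Full customer block: identification, corporate name (RazaoSocial),
    address and contact details."""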
subclass = None
superclass = None
def __init__(self, IdentificacaoTomador=None, RazaoSocial=None, Endereco=None, Contato=None):
self.original_tagname_ = None
self.IdentificacaoTomador = IdentificacaoTomador
self.RazaoSocial = RazaoSocial
self.validate_tsRazaoSocial(self.RazaoSocial)
self.Endereco = Endereco
self.Contato = Contato
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcDadosTomador)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcDadosTomador.subclass:
return tcDadosTomador.subclass(*args_, **kwargs_)
else:
return tcDadosTomador(*args_, **kwargs_)
factory = staticmethod(factory)
def get_IdentificacaoTomador(self): return self.IdentificacaoTomador
def set_IdentificacaoTomador(self, IdentificacaoTomador): self.IdentificacaoTomador = IdentificacaoTomador
def get_RazaoSocial(self): return self.RazaoSocial
def set_RazaoSocial(self, RazaoSocial): self.RazaoSocial = RazaoSocial
def get_Endereco(self): return self.Endereco
def set_Endereco(self, Endereco): self.Endereco = Endereco
def get_Contato(self): return self.Contato
def set_Contato(self, Contato): self.Contato = Contato
def validate_tsRazaoSocial(self, value):
# Validate type tsRazaoSocial, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 115:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.IdentificacaoTomador is not None or
self.RazaoSocial is not None or
self.Endereco is not None or
self.Contato is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcDadosTomador', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcDadosTomador')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcDadosTomador')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcDadosTomador', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcDadosTomador'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcDadosTomador', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.IdentificacaoTomador is not None:
self.IdentificacaoTomador.export(outfile, level, namespace_, name_='IdentificacaoTomador', pretty_print=pretty_print)
if self.RazaoSocial is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<RazaoSocial>%s</RazaoSocial>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocial), input_name='RazaoSocial')), eol_))
if self.Endereco is not None:
self.Endereco.export(outfile, level, namespace_, name_='Endereco', pretty_print=pretty_print)
if self.Contato is not None:
self.Contato.export(outfile, level, namespace_, name_='Contato', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'IdentificacaoTomador':
obj_ = tcIdentificacaoTomador.factory()
obj_.build(child_)
self.IdentificacaoTomador = obj_
obj_.original_tagname_ = 'IdentificacaoTomador'
elif nodeName_ == 'RazaoSocial':
RazaoSocial_ = child_.text
RazaoSocial_ = self.gds_validate_string(RazaoSocial_, node, 'RazaoSocial')
self.RazaoSocial = RazaoSocial_
# validate type tsRazaoSocial
self.validate_tsRazaoSocial(self.RazaoSocial)
elif nodeName_ == 'Endereco':
obj_ = tcEndereco.factory()
obj_.build(child_)
self.Endereco = obj_
obj_.original_tagname_ = 'Endereco'
elif nodeName_ == 'Contato':
obj_ = tcContato.factory()
obj_.build(child_)
self.Contato = obj_
obj_.original_tagname_ = 'Contato'
# end class tcDadosTomador
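# Usage sketch (comments only): composing the nested customer block.
# tcCpfCnpj and tcEndereco are defined elsewhere in this module; the
# constructor keywords below are assumptions based on the same
# generateDS naming pattern, and the CNPJ is a made-up value.
#
#     import sys
#     tomador = tcDadosTomador(
#         IdentificacaoTomador=tcIdentificacaoTomador(
#             CpfCnpj=tcCpfCnpj(Cnpj='12345678000195')),
#         RazaoSocial='Cliente Exemplo Ltda',
#         Contato=tcContato(Email='[email protected]'))
#     tomador.export(sys.stdout, 0, name_='Tomador')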
class tcIdentificacaoIntermediarioServico(GeneratedsSuper):
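    """Identification of a service intermediary: corporate name,
    CPF/CNPJ and municipal registration."""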
subclass = None
superclass = None
def __init__(self, RazaoSocial=None, CpfCnpj=None, InscricaoMunicipal=None):
self.original_tagname_ = None
self.RazaoSocial = RazaoSocial
self.validate_tsRazaoSocial(self.RazaoSocial)
self.CpfCnpj = CpfCnpj
self.InscricaoMunicipal = InscricaoMunicipal
self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcIdentificacaoIntermediarioServico)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcIdentificacaoIntermediarioServico.subclass:
return tcIdentificacaoIntermediarioServico.subclass(*args_, **kwargs_)
else:
return tcIdentificacaoIntermediarioServico(*args_, **kwargs_)
factory = staticmethod(factory)
def get_RazaoSocial(self): return self.RazaoSocial
def set_RazaoSocial(self, RazaoSocial): self.RazaoSocial = RazaoSocial
def get_CpfCnpj(self): return self.CpfCnpj
def set_CpfCnpj(self, CpfCnpj): self.CpfCnpj = CpfCnpj
def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
def validate_tsRazaoSocial(self, value):
# Validate type tsRazaoSocial, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 115:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
def validate_tsInscricaoMunicipal(self, value):
# Validate type tsInscricaoMunicipal, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 15:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.RazaoSocial is not None or
self.CpfCnpj is not None or
self.InscricaoMunicipal is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcIdentificacaoIntermediarioServico', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoIntermediarioServico')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoIntermediarioServico')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoIntermediarioServico', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoIntermediarioServico'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoIntermediarioServico', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.RazaoSocial is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<RazaoSocial>%s</RazaoSocial>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocial), input_name='RazaoSocial')), eol_))
if self.CpfCnpj is not None:
self.CpfCnpj.export(outfile, level, namespace_, name_='CpfCnpj', pretty_print=pretty_print)
if self.InscricaoMunicipal is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'RazaoSocial':
RazaoSocial_ = child_.text
RazaoSocial_ = self.gds_validate_string(RazaoSocial_, node, 'RazaoSocial')
self.RazaoSocial = RazaoSocial_
# validate type tsRazaoSocial
self.validate_tsRazaoSocial(self.RazaoSocial)
elif nodeName_ == 'CpfCnpj':
obj_ = tcCpfCnpj.factory()
obj_.build(child_)
self.CpfCnpj = obj_
obj_.original_tagname_ = 'CpfCnpj'
elif nodeName_ == 'InscricaoMunicipal':
InscricaoMunicipal_ = child_.text
InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
self.InscricaoMunicipal = InscricaoMunicipal_
# validate type tsInscricaoMunicipal
self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
# end class tcIdentificacaoIntermediarioServico
class tcValores(GeneratedsSuper):
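    """Monetary breakdown of the invoice: gross service value,
    deductions, federal withholdings (PIS, COFINS, INSS, IR, CSLL),
    ISS figures, tax base, rate (Aliquota), net value and discounts."""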
subclass = None
superclass = None
def __init__(self, ValorServicos=None, ValorDeducoes=None, ValorPis=None, ValorCofins=None, ValorInss=None, ValorIr=None, ValorCsll=None, IssRetido=None, ValorIss=None, ValorIssRetido=None, OutrasRetencoes=None, BaseCalculo=None, Aliquota=None, ValorLiquidoNfse=None, DescontoIncondicionado=None, DescontoCondicionado=None):
self.original_tagname_ = None
self.ValorServicos = ValorServicos
self.validate_tsValor(self.ValorServicos)
self.ValorDeducoes = ValorDeducoes
self.validate_tsValor(self.ValorDeducoes)
self.ValorPis = ValorPis
self.validate_tsValor(self.ValorPis)
self.ValorCofins = ValorCofins
self.validate_tsValor(self.ValorCofins)
self.ValorInss = ValorInss
self.validate_tsValor(self.ValorInss)
self.ValorIr = ValorIr
self.validate_tsValor(self.ValorIr)
self.ValorCsll = ValorCsll
self.validate_tsValor(self.ValorCsll)
self.IssRetido = IssRetido
self.validate_tsSimNao(self.IssRetido)
self.ValorIss = ValorIss
self.validate_tsValor(self.ValorIss)
self.ValorIssRetido = ValorIssRetido
self.validate_tsValor(self.ValorIssRetido)
self.OutrasRetencoes = OutrasRetencoes
self.validate_tsValor(self.OutrasRetencoes)
self.BaseCalculo = BaseCalculo
self.validate_tsValor(self.BaseCalculo)
self.Aliquota = Aliquota
self.validate_tsAliquota(self.Aliquota)
self.ValorLiquidoNfse = ValorLiquidoNfse
self.validate_tsValor(self.ValorLiquidoNfse)
self.DescontoIncondicionado = DescontoIncondicionado
self.validate_tsValor(self.DescontoIncondicionado)
self.DescontoCondicionado = DescontoCondicionado
self.validate_tsValor(self.DescontoCondicionado)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcValores)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcValores.subclass:
return tcValores.subclass(*args_, **kwargs_)
else:
return tcValores(*args_, **kwargs_)
factory = staticmethod(factory)
def get_ValorServicos(self): return self.ValorServicos
def set_ValorServicos(self, ValorServicos): self.ValorServicos = ValorServicos
def get_ValorDeducoes(self): return self.ValorDeducoes
def set_ValorDeducoes(self, ValorDeducoes): self.ValorDeducoes = ValorDeducoes
def get_ValorPis(self): return self.ValorPis
def set_ValorPis(self, ValorPis): self.ValorPis = ValorPis
def get_ValorCofins(self): return self.ValorCofins
def set_ValorCofins(self, ValorCofins): self.ValorCofins = ValorCofins
def get_ValorInss(self): return self.ValorInss
def set_ValorInss(self, ValorInss): self.ValorInss = ValorInss
def get_ValorIr(self): return self.ValorIr
def set_ValorIr(self, ValorIr): self.ValorIr = ValorIr
def get_ValorCsll(self): return self.ValorCsll
def set_ValorCsll(self, ValorCsll): self.ValorCsll = ValorCsll
def get_IssRetido(self): return self.IssRetido
def set_IssRetido(self, IssRetido): self.IssRetido = IssRetido
def get_ValorIss(self): return self.ValorIss
def set_ValorIss(self, ValorIss): self.ValorIss = ValorIss
def get_ValorIssRetido(self): return self.ValorIssRetido
def set_ValorIssRetido(self, ValorIssRetido): self.ValorIssRetido = ValorIssRetido
def get_OutrasRetencoes(self): return self.OutrasRetencoes
def set_OutrasRetencoes(self, OutrasRetencoes): self.OutrasRetencoes = OutrasRetencoes
def get_BaseCalculo(self): return self.BaseCalculo
def set_BaseCalculo(self, BaseCalculo): self.BaseCalculo = BaseCalculo
def get_Aliquota(self): return self.Aliquota
def set_Aliquota(self, Aliquota): self.Aliquota = Aliquota
def get_ValorLiquidoNfse(self): return self.ValorLiquidoNfse
def set_ValorLiquidoNfse(self, ValorLiquidoNfse): self.ValorLiquidoNfse = ValorLiquidoNfse
def get_DescontoIncondicionado(self): return self.DescontoIncondicionado
def set_DescontoIncondicionado(self, DescontoIncondicionado): self.DescontoIncondicionado = DescontoIncondicionado
def get_DescontoCondicionado(self): return self.DescontoCondicionado
def set_DescontoCondicionado(self, DescontoCondicionado): self.DescontoCondicionado = DescontoCondicionado
def validate_tsValor(self, value):
# Validate type tsValor, a restriction on xsd:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tsValor' % {"value" : value} )
            # tsValor allows at most 15 digits overall; the string form of
            # the float (including the decimal point) is a rough proxy.
            if len(str(value)) > 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsValor' % {"value" : value} )
def validate_tsSimNao(self, value):
# Validate type tsSimNao, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
            # IssRetido is stored as an integer; coerce with str() so the
            # regex check and the warning message do not raise.
            if not self.gds_validate_simple_patterns(
                self.validate_tsSimNao_patterns_, str(value)):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tsSimNao_patterns_, ))
validate_tsSimNao_patterns_ = [['^1$|^2$']]
def validate_tsAliquota(self, value):
# Validate type tsAliquota, a restriction on xsd:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tsAliquota' % {"value" : value} )
            if len(str(value)) > 5:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsAliquota' % {"value" : value} )
def hasContent_(self):
if (
self.ValorServicos is not None or
self.ValorDeducoes is not None or
self.ValorPis is not None or
self.ValorCofins is not None or
self.ValorInss is not None or
self.ValorIr is not None or
self.ValorCsll is not None or
self.IssRetido is not None or
self.ValorIss is not None or
self.ValorIssRetido is not None or
self.OutrasRetencoes is not None or
self.BaseCalculo is not None or
self.Aliquota is not None or
self.ValorLiquidoNfse is not None or
self.DescontoIncondicionado is not None or
self.DescontoCondicionado is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcValores', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcValores')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcValores')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcValores', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcValores'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcValores', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.ValorServicos is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ValorServicos>%s</ValorServicos>%s' % (self.gds_format_float(self.ValorServicos, input_name='ValorServicos'), eol_))
if self.ValorDeducoes is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ValorDeducoes>%s</ValorDeducoes>%s' % (self.gds_format_float(self.ValorDeducoes, input_name='ValorDeducoes'), eol_))
if self.ValorPis is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ValorPis>%s</ValorPis>%s' % (self.gds_format_float(self.ValorPis, input_name='ValorPis'), eol_))
if self.ValorCofins is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ValorCofins>%s</ValorCofins>%s' % (self.gds_format_float(self.ValorCofins, input_name='ValorCofins'), eol_))
if self.ValorInss is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ValorInss>%s</ValorInss>%s' % (self.gds_format_float(self.ValorInss, input_name='ValorInss'), eol_))
if self.ValorIr is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ValorIr>%s</ValorIr>%s' % (self.gds_format_float(self.ValorIr, input_name='ValorIr'), eol_))
if self.ValorCsll is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ValorCsll>%s</ValorCsll>%s' % (self.gds_format_float(self.ValorCsll, input_name='ValorCsll'), eol_))
if self.IssRetido is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<IssRetido>%s</IssRetido>%s' % (self.gds_format_integer(self.IssRetido, input_name='IssRetido'), eol_))
if self.ValorIss is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ValorIss>%s</ValorIss>%s' % (self.gds_format_float(self.ValorIss, input_name='ValorIss'), eol_))
if self.ValorIssRetido is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ValorIssRetido>%s</ValorIssRetido>%s' % (self.gds_format_float(self.ValorIssRetido, input_name='ValorIssRetido'), eol_))
if self.OutrasRetencoes is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<OutrasRetencoes>%s</OutrasRetencoes>%s' % (self.gds_format_float(self.OutrasRetencoes, input_name='OutrasRetencoes'), eol_))
if self.BaseCalculo is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<BaseCalculo>%s</BaseCalculo>%s' % (self.gds_format_float(self.BaseCalculo, input_name='BaseCalculo'), eol_))
if self.Aliquota is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Aliquota>%s</Aliquota>%s' % (self.gds_format_float(self.Aliquota, input_name='Aliquota'), eol_))
if self.ValorLiquidoNfse is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ValorLiquidoNfse>%s</ValorLiquidoNfse>%s' % (self.gds_format_float(self.ValorLiquidoNfse, input_name='ValorLiquidoNfse'), eol_))
if self.DescontoIncondicionado is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<DescontoIncondicionado>%s</DescontoIncondicionado>%s' % (self.gds_format_float(self.DescontoIncondicionado, input_name='DescontoIncondicionado'), eol_))
if self.DescontoCondicionado is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<DescontoCondicionado>%s</DescontoCondicionado>%s' % (self.gds_format_float(self.DescontoCondicionado, input_name='DescontoCondicionado'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'ValorServicos':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorServicos')
self.ValorServicos = fval_
# validate type tsValor
self.validate_tsValor(self.ValorServicos)
elif nodeName_ == 'ValorDeducoes':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorDeducoes')
self.ValorDeducoes = fval_
# validate type tsValor
self.validate_tsValor(self.ValorDeducoes)
elif nodeName_ == 'ValorPis':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorPis')
self.ValorPis = fval_
# validate type tsValor
self.validate_tsValor(self.ValorPis)
elif nodeName_ == 'ValorCofins':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorCofins')
self.ValorCofins = fval_
# validate type tsValor
self.validate_tsValor(self.ValorCofins)
elif nodeName_ == 'ValorInss':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorInss')
self.ValorInss = fval_
# validate type tsValor
self.validate_tsValor(self.ValorInss)
elif nodeName_ == 'ValorIr':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorIr')
self.ValorIr = fval_
# validate type tsValor
self.validate_tsValor(self.ValorIr)
elif nodeName_ == 'ValorCsll':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorCsll')
self.ValorCsll = fval_
# validate type tsValor
self.validate_tsValor(self.ValorCsll)
elif nodeName_ == 'IssRetido':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'IssRetido')
self.IssRetido = ival_
# validate type tsSimNao
self.validate_tsSimNao(self.IssRetido)
elif nodeName_ == 'ValorIss':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorIss')
self.ValorIss = fval_
# validate type tsValor
self.validate_tsValor(self.ValorIss)
elif nodeName_ == 'ValorIssRetido':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorIssRetido')
self.ValorIssRetido = fval_
# validate type tsValor
self.validate_tsValor(self.ValorIssRetido)
elif nodeName_ == 'OutrasRetencoes':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'OutrasRetencoes')
self.OutrasRetencoes = fval_
# validate type tsValor
self.validate_tsValor(self.OutrasRetencoes)
elif nodeName_ == 'BaseCalculo':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'BaseCalculo')
self.BaseCalculo = fval_
# validate type tsValor
self.validate_tsValor(self.BaseCalculo)
elif nodeName_ == 'Aliquota':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'Aliquota')
self.Aliquota = fval_
# validate type tsAliquota
self.validate_tsAliquota(self.Aliquota)
elif nodeName_ == 'ValorLiquidoNfse':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorLiquidoNfse')
self.ValorLiquidoNfse = fval_
# validate type tsValor
self.validate_tsValor(self.ValorLiquidoNfse)
elif nodeName_ == 'DescontoIncondicionado':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'DescontoIncondicionado')
self.DescontoIncondicionado = fval_
# validate type tsValor
self.validate_tsValor(self.DescontoIncondicionado)
elif nodeName_ == 'DescontoCondicionado':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'DescontoCondicionado')
self.DescontoCondicionado = fval_
# validate type tsValor
self.validate_tsValor(self.DescontoCondicionado)
# end class tcValores
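# Usage sketch (comments only): the monetary fields are plain floats and
# IssRetido follows tsSimNao (1 = yes, 2 = no). All figures are
# illustrative.
#
#     import sys
#     valores = tcValores(ValorServicos=1500.00, BaseCalculo=1500.00,
#                         Aliquota=0.05, ValorIss=75.00, IssRetido=2,
#                         ValorLiquidoNfse=1500.00)
#     valores.export(sys.stdout, 0, name_='Valores')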
class tcDadosServico(GeneratedsSuper):
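    """Description of the service rendered: amounts (tcValores),
    service-list item, CNAE activity code, municipal tax code,
    free-text description (Discriminacao) and IBGE municipality."""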
subclass = None
superclass = None
def __init__(self, Valores=None, ItemListaServico=None, CodigoCnae=None, CodigoTributacaoMunicipio=None, Discriminacao=None, CodigoMunicipio=None):
self.original_tagname_ = None
self.Valores = Valores
self.ItemListaServico = ItemListaServico
self.validate_tsItemListaServico(self.ItemListaServico)
self.CodigoCnae = CodigoCnae
self.validate_tsCodigoCnae(self.CodigoCnae)
self.CodigoTributacaoMunicipio = CodigoTributacaoMunicipio
self.validate_tsCodigoTributacao(self.CodigoTributacaoMunicipio)
self.Discriminacao = Discriminacao
self.validate_tsDiscriminacao(self.Discriminacao)
self.CodigoMunicipio = CodigoMunicipio
self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcDadosServico)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcDadosServico.subclass:
return tcDadosServico.subclass(*args_, **kwargs_)
else:
return tcDadosServico(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Valores(self): return self.Valores
def set_Valores(self, Valores): self.Valores = Valores
def get_ItemListaServico(self): return self.ItemListaServico
def set_ItemListaServico(self, ItemListaServico): self.ItemListaServico = ItemListaServico
def get_CodigoCnae(self): return self.CodigoCnae
def set_CodigoCnae(self, CodigoCnae): self.CodigoCnae = CodigoCnae
def get_CodigoTributacaoMunicipio(self): return self.CodigoTributacaoMunicipio
def set_CodigoTributacaoMunicipio(self, CodigoTributacaoMunicipio): self.CodigoTributacaoMunicipio = CodigoTributacaoMunicipio
def get_Discriminacao(self): return self.Discriminacao
def set_Discriminacao(self, Discriminacao): self.Discriminacao = Discriminacao
def get_CodigoMunicipio(self): return self.CodigoMunicipio
def set_CodigoMunicipio(self, CodigoMunicipio): self.CodigoMunicipio = CodigoMunicipio
def validate_tsItemListaServico(self, value):
# Validate type tsItemListaServico, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 5:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsItemListaServico' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsItemListaServico' % {"value" : value.encode("utf-8")} )
def validate_tsCodigoCnae(self, value):
# Validate type tsCodigoCnae, a restriction on xsd:int.
if value is not None and Validate_simpletypes_:
            # CNAE activity codes have at most 7 digits, so warn only above
            # that length.
            if len(str(value)) > 7:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoCnae' % {"value" : value} )
def validate_tsCodigoTributacao(self, value):
# Validate type tsCodigoTributacao, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 20:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoTributacao' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoTributacao' % {"value" : value.encode("utf-8")} )
def validate_tsDiscriminacao(self, value):
# Validate type tsDiscriminacao, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 2000:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsDiscriminacao' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsDiscriminacao' % {"value" : value.encode("utf-8")} )
def validate_tsCodigoMunicipioIbge(self, value):
# Validate type tsCodigoMunicipioIbge, a restriction on xsd:int.
if value is not None and Validate_simpletypes_:
            # IBGE municipality codes have at most 7 digits, so warn only
            # above that length.
            if len(str(value)) > 7:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoMunicipioIbge' % {"value" : value} )
def hasContent_(self):
if (
self.Valores is not None or
self.ItemListaServico is not None or
self.CodigoCnae is not None or
self.CodigoTributacaoMunicipio is not None or
self.Discriminacao is not None or
self.CodigoMunicipio is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcDadosServico', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcDadosServico')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcDadosServico')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcDadosServico', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcDadosServico'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcDadosServico', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Valores is not None:
self.Valores.export(outfile, level, namespace_, name_='Valores', pretty_print=pretty_print)
if self.ItemListaServico is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ItemListaServico>%s</ItemListaServico>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.ItemListaServico), input_name='ItemListaServico')), eol_))
if self.CodigoCnae is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<CodigoCnae>%s</CodigoCnae>%s' % (self.gds_format_integer(self.CodigoCnae, input_name='CodigoCnae'), eol_))
if self.CodigoTributacaoMunicipio is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<CodigoTributacaoMunicipio>%s</CodigoTributacaoMunicipio>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoTributacaoMunicipio), input_name='CodigoTributacaoMunicipio')), eol_))
if self.Discriminacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Discriminacao>%s</Discriminacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Discriminacao), input_name='Discriminacao')), eol_))
if self.CodigoMunicipio is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<CodigoMunicipio>%s</CodigoMunicipio>%s' % (self.gds_format_integer(self.CodigoMunicipio, input_name='CodigoMunicipio'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Valores':
obj_ = tcValores.factory()
obj_.build(child_)
self.Valores = obj_
obj_.original_tagname_ = 'Valores'
elif nodeName_ == 'ItemListaServico':
ItemListaServico_ = child_.text
ItemListaServico_ = self.gds_validate_string(ItemListaServico_, node, 'ItemListaServico')
self.ItemListaServico = ItemListaServico_
# validate type tsItemListaServico
self.validate_tsItemListaServico(self.ItemListaServico)
elif nodeName_ == 'CodigoCnae':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'CodigoCnae')
self.CodigoCnae = ival_
# validate type tsCodigoCnae
self.validate_tsCodigoCnae(self.CodigoCnae)
elif nodeName_ == 'CodigoTributacaoMunicipio':
CodigoTributacaoMunicipio_ = child_.text
CodigoTributacaoMunicipio_ = self.gds_validate_string(CodigoTributacaoMunicipio_, node, 'CodigoTributacaoMunicipio')
self.CodigoTributacaoMunicipio = CodigoTributacaoMunicipio_
# validate type tsCodigoTributacao
self.validate_tsCodigoTributacao(self.CodigoTributacaoMunicipio)
elif nodeName_ == 'Discriminacao':
Discriminacao_ = child_.text
Discriminacao_ = self.gds_validate_string(Discriminacao_, node, 'Discriminacao')
self.Discriminacao = Discriminacao_
# validate type tsDiscriminacao
self.validate_tsDiscriminacao(self.Discriminacao)
elif nodeName_ == 'CodigoMunicipio':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'CodigoMunicipio')
self.CodigoMunicipio = ival_
# validate type tsCodigoMunicipioIbge
self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
# end class tcDadosServico
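# Usage sketch (comments only): a full service block nesting tcValores.
# The service-list item, CNAE code and municipality code are realistic
# but illustrative values.
#
#     import sys
#     servico = tcDadosServico(
#         Valores=tcValores(ValorServicos=1500.00),
#         ItemListaServico='1.05',
#         CodigoCnae=6201500,
#         Discriminacao='Desenvolvimento de software sob encomenda',
#         CodigoMunicipio=3550308)  # 3550308 = Sao Paulo (IBGE)
#     servico.export(sys.stdout, 0, name_='Servico')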
class tcDadosConstrucaoCivil(GeneratedsSuper):
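    """Civil-construction data: works/permit code (CodigoObra) and ART
    (technical responsibility record) number."""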
subclass = None
superclass = None
def __init__(self, CodigoObra=None, Art=None):
self.original_tagname_ = None
self.CodigoObra = CodigoObra
self.validate_tsCodigoObra(self.CodigoObra)
self.Art = Art
self.validate_tsArt(self.Art)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcDadosConstrucaoCivil)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcDadosConstrucaoCivil.subclass:
return tcDadosConstrucaoCivil.subclass(*args_, **kwargs_)
else:
return tcDadosConstrucaoCivil(*args_, **kwargs_)
factory = staticmethod(factory)
def get_CodigoObra(self): return self.CodigoObra
def set_CodigoObra(self, CodigoObra): self.CodigoObra = CodigoObra
def get_Art(self): return self.Art
def set_Art(self, Art): self.Art = Art
def validate_tsCodigoObra(self, value):
# Validate type tsCodigoObra, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 15:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoObra' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoObra' % {"value" : value.encode("utf-8")} )
def validate_tsArt(self, value):
# Validate type tsArt, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 15:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsArt' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsArt' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.CodigoObra is not None or
self.Art is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcDadosConstrucaoCivil', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcDadosConstrucaoCivil')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcDadosConstrucaoCivil')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcDadosConstrucaoCivil', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcDadosConstrucaoCivil'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcDadosConstrucaoCivil', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.CodigoObra is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<CodigoObra>%s</CodigoObra>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoObra), input_name='CodigoObra')), eol_))
if self.Art is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Art>%s</Art>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Art), input_name='Art')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'CodigoObra':
CodigoObra_ = child_.text
CodigoObra_ = self.gds_validate_string(CodigoObra_, node, 'CodigoObra')
self.CodigoObra = CodigoObra_
# validate type tsCodigoObra
self.validate_tsCodigoObra(self.CodigoObra)
elif nodeName_ == 'Art':
Art_ = child_.text
Art_ = self.gds_validate_string(Art_, node, 'Art')
self.Art = Art_
# validate type tsArt
self.validate_tsArt(self.Art)
# end class tcDadosConstrucaoCivil
class tcDadosPrestador(GeneratedsSuper):
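    """Full provider block: identification, corporate name, trade name
    (NomeFantasia), address and contact details."""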
subclass = None
superclass = None
def __init__(self, IdentificacaoPrestador=None, RazaoSocial=None, NomeFantasia=None, Endereco=None, Contato=None):
self.original_tagname_ = None
self.IdentificacaoPrestador = IdentificacaoPrestador
self.RazaoSocial = RazaoSocial
self.validate_tsRazaoSocial(self.RazaoSocial)
self.NomeFantasia = NomeFantasia
self.validate_tsNomeFantasia(self.NomeFantasia)
self.Endereco = Endereco
self.Contato = Contato
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcDadosPrestador)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcDadosPrestador.subclass:
return tcDadosPrestador.subclass(*args_, **kwargs_)
else:
return tcDadosPrestador(*args_, **kwargs_)
factory = staticmethod(factory)
def get_IdentificacaoPrestador(self): return self.IdentificacaoPrestador
def set_IdentificacaoPrestador(self, IdentificacaoPrestador): self.IdentificacaoPrestador = IdentificacaoPrestador
def get_RazaoSocial(self): return self.RazaoSocial
def set_RazaoSocial(self, RazaoSocial): self.RazaoSocial = RazaoSocial
def get_NomeFantasia(self): return self.NomeFantasia
def set_NomeFantasia(self, NomeFantasia): self.NomeFantasia = NomeFantasia
def get_Endereco(self): return self.Endereco
def set_Endereco(self, Endereco): self.Endereco = Endereco
def get_Contato(self): return self.Contato
def set_Contato(self, Contato): self.Contato = Contato
def validate_tsRazaoSocial(self, value):
# Validate type tsRazaoSocial, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 115:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
def validate_tsNomeFantasia(self, value):
# Validate type tsNomeFantasia, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 60:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsNomeFantasia' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsNomeFantasia' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.IdentificacaoPrestador is not None or
self.RazaoSocial is not None or
self.NomeFantasia is not None or
self.Endereco is not None or
self.Contato is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcDadosPrestador', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcDadosPrestador')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcDadosPrestador')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcDadosPrestador', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcDadosPrestador'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcDadosPrestador', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.IdentificacaoPrestador is not None:
self.IdentificacaoPrestador.export(outfile, level, namespace_, name_='IdentificacaoPrestador', pretty_print=pretty_print)
if self.RazaoSocial is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<RazaoSocial>%s</RazaoSocial>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocial), input_name='RazaoSocial')), eol_))
if self.NomeFantasia is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<NomeFantasia>%s</NomeFantasia>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.NomeFantasia), input_name='NomeFantasia')), eol_))
if self.Endereco is not None:
self.Endereco.export(outfile, level, namespace_, name_='Endereco', pretty_print=pretty_print)
if self.Contato is not None:
self.Contato.export(outfile, level, namespace_, name_='Contato', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'IdentificacaoPrestador':
obj_ = tcIdentificacaoPrestador.factory()
obj_.build(child_)
self.IdentificacaoPrestador = obj_
obj_.original_tagname_ = 'IdentificacaoPrestador'
elif nodeName_ == 'RazaoSocial':
RazaoSocial_ = child_.text
RazaoSocial_ = self.gds_validate_string(RazaoSocial_, node, 'RazaoSocial')
self.RazaoSocial = RazaoSocial_
# validate type tsRazaoSocial
self.validate_tsRazaoSocial(self.RazaoSocial)
elif nodeName_ == 'NomeFantasia':
NomeFantasia_ = child_.text
NomeFantasia_ = self.gds_validate_string(NomeFantasia_, node, 'NomeFantasia')
self.NomeFantasia = NomeFantasia_
# validate type tsNomeFantasia
self.validate_tsNomeFantasia(self.NomeFantasia)
elif nodeName_ == 'Endereco':
obj_ = tcEndereco.factory()
obj_.build(child_)
self.Endereco = obj_
obj_.original_tagname_ = 'Endereco'
elif nodeName_ == 'Contato':
obj_ = tcContato.factory()
obj_.build(child_)
self.Contato = obj_
obj_.original_tagname_ = 'Contato'
# end class tcDadosPrestador
class tcInfRps(GeneratedsSuper):
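    """Binding for ``tcInfRps``, the payload of an RPS (Recibo
    Provisório de Serviços): an ``Id`` attribute, the RPS
    identification and issue date, nature of operation, special tax
    regime, Simples Nacional and cultural-incentive flags, status, an
    optional substituted RPS, and the service, provider, taker,
    intermediary and construction blocks."""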
subclass = None
superclass = None
def __init__(self, Id=None, IdentificacaoRps=None, DataEmissao=None, NaturezaOperacao=None, RegimeEspecialTributacao=None, OptanteSimplesNacional=None, IncentivadorCultural=None, Status=None, RpsSubstituido=None, Servico=None, Prestador=None, Tomador=None, IntermediarioServico=None, ConstrucaoCivil=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.IdentificacaoRps = IdentificacaoRps
if isinstance(DataEmissao, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataEmissao, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = DataEmissao
self.DataEmissao = initvalue_
self.NaturezaOperacao = NaturezaOperacao
self.validate_tsNaturezaOperacao(self.NaturezaOperacao)
self.RegimeEspecialTributacao = RegimeEspecialTributacao
self.validate_tsRegimeEspecialTributacao(self.RegimeEspecialTributacao)
self.OptanteSimplesNacional = OptanteSimplesNacional
self.validate_tsSimNao(self.OptanteSimplesNacional)
self.IncentivadorCultural = IncentivadorCultural
self.validate_tsSimNao(self.IncentivadorCultural)
self.Status = Status
self.validate_tsStatusRps(self.Status)
self.RpsSubstituido = RpsSubstituido
self.Servico = Servico
self.Prestador = Prestador
self.Tomador = Tomador
self.IntermediarioServico = IntermediarioServico
self.ConstrucaoCivil = ConstrucaoCivil
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcInfRps)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcInfRps.subclass:
return tcInfRps.subclass(*args_, **kwargs_)
else:
return tcInfRps(*args_, **kwargs_)
factory = staticmethod(factory)
def get_IdentificacaoRps(self): return self.IdentificacaoRps
def set_IdentificacaoRps(self, IdentificacaoRps): self.IdentificacaoRps = IdentificacaoRps
def get_DataEmissao(self): return self.DataEmissao
def set_DataEmissao(self, DataEmissao): self.DataEmissao = DataEmissao
def get_NaturezaOperacao(self): return self.NaturezaOperacao
def set_NaturezaOperacao(self, NaturezaOperacao): self.NaturezaOperacao = NaturezaOperacao
def get_RegimeEspecialTributacao(self): return self.RegimeEspecialTributacao
def set_RegimeEspecialTributacao(self, RegimeEspecialTributacao): self.RegimeEspecialTributacao = RegimeEspecialTributacao
def get_OptanteSimplesNacional(self): return self.OptanteSimplesNacional
def set_OptanteSimplesNacional(self, OptanteSimplesNacional): self.OptanteSimplesNacional = OptanteSimplesNacional
def get_IncentivadorCultural(self): return self.IncentivadorCultural
def set_IncentivadorCultural(self, IncentivadorCultural): self.IncentivadorCultural = IncentivadorCultural
def get_Status(self): return self.Status
def set_Status(self, Status): self.Status = Status
def get_RpsSubstituido(self): return self.RpsSubstituido
def set_RpsSubstituido(self, RpsSubstituido): self.RpsSubstituido = RpsSubstituido
def get_Servico(self): return self.Servico
def set_Servico(self, Servico): self.Servico = Servico
def get_Prestador(self): return self.Prestador
def set_Prestador(self, Prestador): self.Prestador = Prestador
def get_Tomador(self): return self.Tomador
def set_Tomador(self, Tomador): self.Tomador = Tomador
def get_IntermediarioServico(self): return self.IntermediarioServico
def set_IntermediarioServico(self, IntermediarioServico): self.IntermediarioServico = IntermediarioServico
def get_ConstrucaoCivil(self): return self.ConstrucaoCivil
def set_ConstrucaoCivil(self, ConstrucaoCivil): self.ConstrucaoCivil = ConstrucaoCivil
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def validate_tsNaturezaOperacao(self, value):
# Validate type tsNaturezaOperacao, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tsNaturezaOperacao_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tsNaturezaOperacao_patterns_, ))
validate_tsNaturezaOperacao_patterns_ = [['^1$|^2$|^3$|^4$|^5$|^6$']]
def validate_tsRegimeEspecialTributacao(self, value):
# Validate type tsRegimeEspecialTributacao, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tsRegimeEspecialTributacao_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tsRegimeEspecialTributacao_patterns_, ))
validate_tsRegimeEspecialTributacao_patterns_ = [['^0$|^1$|^2$|^3$|^4$|^5$|^6$']]
def validate_tsSimNao(self, value):
# Validate type tsSimNao, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tsSimNao_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tsSimNao_patterns_, ))
validate_tsSimNao_patterns_ = [['^1$|^2$']]
def validate_tsStatusRps(self, value):
# Validate type tsStatusRps, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tsStatusRps_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tsStatusRps_patterns_, ))
validate_tsStatusRps_patterns_ = [['^1$|^2$']]
def validate_tsIdTag(self, value):
# Validate type tsIdTag, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.IdentificacaoRps is not None or
self.DataEmissao is not None or
self.NaturezaOperacao is not None or
self.RegimeEspecialTributacao is not None or
self.OptanteSimplesNacional is not None or
self.IncentivadorCultural is not None or
self.Status is not None or
self.RpsSubstituido is not None or
self.Servico is not None or
self.Prestador is not None or
self.Tomador is not None or
self.IntermediarioServico is not None or
self.ConstrucaoCivil is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcInfRps', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfRps')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfRps')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfRps', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfRps'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='tcInfRps', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.IdentificacaoRps is not None:
self.IdentificacaoRps.export(outfile, level, namespace_, name_='IdentificacaoRps', pretty_print=pretty_print)
if self.DataEmissao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<DataEmissao>%s</DataEmissao>%s' % (self.gds_format_datetime(self.DataEmissao, input_name='DataEmissao'), eol_))
if self.NaturezaOperacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<NaturezaOperacao>%s</NaturezaOperacao>%s' % (self.gds_format_integer(self.NaturezaOperacao, input_name='NaturezaOperacao'), eol_))
if self.RegimeEspecialTributacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<RegimeEspecialTributacao>%s</RegimeEspecialTributacao>%s' % (self.gds_format_integer(self.RegimeEspecialTributacao, input_name='RegimeEspecialTributacao'), eol_))
if self.OptanteSimplesNacional is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<OptanteSimplesNacional>%s</OptanteSimplesNacional>%s' % (self.gds_format_integer(self.OptanteSimplesNacional, input_name='OptanteSimplesNacional'), eol_))
if self.IncentivadorCultural is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<IncentivadorCultural>%s</IncentivadorCultural>%s' % (self.gds_format_integer(self.IncentivadorCultural, input_name='IncentivadorCultural'), eol_))
if self.Status is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Status>%s</Status>%s' % (self.gds_format_integer(self.Status, input_name='Status'), eol_))
if self.RpsSubstituido is not None:
self.RpsSubstituido.export(outfile, level, namespace_, name_='RpsSubstituido', pretty_print=pretty_print)
if self.Servico is not None:
self.Servico.export(outfile, level, namespace_, name_='Servico', pretty_print=pretty_print)
if self.Prestador is not None:
self.Prestador.export(outfile, level, namespace_, name_='Prestador', pretty_print=pretty_print)
if self.Tomador is not None:
self.Tomador.export(outfile, level, namespace_, name_='Tomador', pretty_print=pretty_print)
if self.IntermediarioServico is not None:
self.IntermediarioServico.export(outfile, level, namespace_, name_='IntermediarioServico', pretty_print=pretty_print)
if self.ConstrucaoCivil is not None:
self.ConstrucaoCivil.export(outfile, level, namespace_, name_='ConstrucaoCivil', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
self.validate_tsIdTag(self.Id) # validate type tsIdTag
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'IdentificacaoRps':
obj_ = tcIdentificacaoRps.factory()
obj_.build(child_)
self.IdentificacaoRps = obj_
obj_.original_tagname_ = 'IdentificacaoRps'
elif nodeName_ == 'DataEmissao':
sval_ = child_.text
dval_ = self.gds_parse_datetime(sval_)
self.DataEmissao = dval_
elif nodeName_ == 'NaturezaOperacao':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NaturezaOperacao')
self.NaturezaOperacao = ival_
# validate type tsNaturezaOperacao
self.validate_tsNaturezaOperacao(self.NaturezaOperacao)
elif nodeName_ == 'RegimeEspecialTributacao':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'RegimeEspecialTributacao')
self.RegimeEspecialTributacao = ival_
# validate type tsRegimeEspecialTributacao
self.validate_tsRegimeEspecialTributacao(self.RegimeEspecialTributacao)
elif nodeName_ == 'OptanteSimplesNacional':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'OptanteSimplesNacional')
self.OptanteSimplesNacional = ival_
# validate type tsSimNao
self.validate_tsSimNao(self.OptanteSimplesNacional)
elif nodeName_ == 'IncentivadorCultural':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'IncentivadorCultural')
self.IncentivadorCultural = ival_
# validate type tsSimNao
self.validate_tsSimNao(self.IncentivadorCultural)
elif nodeName_ == 'Status':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'Status')
self.Status = ival_
# validate type tsStatusRps
self.validate_tsStatusRps(self.Status)
elif nodeName_ == 'RpsSubstituido':
obj_ = tcIdentificacaoRps.factory()
obj_.build(child_)
self.RpsSubstituido = obj_
obj_.original_tagname_ = 'RpsSubstituido'
elif nodeName_ == 'Servico':
obj_ = tcDadosServico.factory()
obj_.build(child_)
self.Servico = obj_
obj_.original_tagname_ = 'Servico'
elif nodeName_ == 'Prestador':
obj_ = tcIdentificacaoPrestador.factory()
obj_.build(child_)
self.Prestador = obj_
obj_.original_tagname_ = 'Prestador'
elif nodeName_ == 'Tomador':
obj_ = tcDadosTomador.factory()
obj_.build(child_)
self.Tomador = obj_
obj_.original_tagname_ = 'Tomador'
elif nodeName_ == 'IntermediarioServico':
obj_ = tcIdentificacaoIntermediarioServico.factory()
obj_.build(child_)
self.IntermediarioServico = obj_
obj_.original_tagname_ = 'IntermediarioServico'
elif nodeName_ == 'ConstrucaoCivil':
obj_ = tcDadosConstrucaoCivil.factory()
obj_.build(child_)
self.ConstrucaoCivil = obj_
obj_.original_tagname_ = 'ConstrucaoCivil'
# end class tcInfRps
class tcRps(GeneratedsSuper):
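    """Binding for ``tcRps``: an RPS document, i.e. a ``tcInfRps``
    payload plus an optional XML-DSig ``Signature`` element."""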
subclass = None
superclass = None
def __init__(self, InfRps=None, Signature=None):
self.original_tagname_ = None
self.InfRps = InfRps
self.Signature = Signature
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcRps)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcRps.subclass:
return tcRps.subclass(*args_, **kwargs_)
else:
return tcRps(*args_, **kwargs_)
factory = staticmethod(factory)
def get_InfRps(self): return self.InfRps
def set_InfRps(self, InfRps): self.InfRps = InfRps
def get_Signature(self): return self.Signature
def set_Signature(self, Signature): self.Signature = Signature
def hasContent_(self):
if (
self.InfRps is not None or
self.Signature is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcRps', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcRps')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcRps')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcRps', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcRps'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcRps', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.InfRps is not None:
self.InfRps.export(outfile, level, namespace_, name_='InfRps', pretty_print=pretty_print)
if self.Signature is not None:
self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'InfRps':
obj_ = tcInfRps.factory()
obj_.build(child_)
self.InfRps = obj_
obj_.original_tagname_ = 'InfRps'
elif nodeName_ == 'Signature':
obj_ = SignatureType.factory()
obj_.build(child_)
self.Signature = obj_
obj_.original_tagname_ = 'Signature'
# end class tcRps
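
# Illustrative usage sketch, kept commented out so that importing this
# generated module stays side-effect free. The field values below are
# made-up placeholders, not schema defaults; export() writes the XML to
# any file-like object:
#
#     import sys
#     rps = tcRps(InfRps=tcInfRps(Id='rps1',
#                                 DataEmissao='2016-01-01T10:00:00'))
#     rps.export(sys.stdout, 0, name_='Rps')
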
class tcIdentificacaoNfse(GeneratedsSuper):
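    """Binding for ``tcIdentificacaoNfse``: identifies an issued NFS-e
    by note number, issuer CNPJ, municipal registration
    (InscricaoMunicipal) and IBGE municipality code."""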
subclass = None
superclass = None
def __init__(self, Numero=None, Cnpj=None, InscricaoMunicipal=None, CodigoMunicipio=None):
self.original_tagname_ = None
self.Numero = Numero
self.validate_tsNumeroNfse(self.Numero)
self.Cnpj = Cnpj
self.validate_tsCnpj(self.Cnpj)
self.InscricaoMunicipal = InscricaoMunicipal
self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
self.CodigoMunicipio = CodigoMunicipio
self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcIdentificacaoNfse)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcIdentificacaoNfse.subclass:
return tcIdentificacaoNfse.subclass(*args_, **kwargs_)
else:
return tcIdentificacaoNfse(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Numero(self): return self.Numero
def set_Numero(self, Numero): self.Numero = Numero
def get_Cnpj(self): return self.Cnpj
def set_Cnpj(self, Cnpj): self.Cnpj = Cnpj
def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
def get_CodigoMunicipio(self): return self.CodigoMunicipio
def set_CodigoMunicipio(self, CodigoMunicipio): self.CodigoMunicipio = CodigoMunicipio
def validate_tsNumeroNfse(self, value):
# Validate type tsNumeroNfse, a restriction on xsd:nonNegativeInteger.
if value is not None and Validate_simpletypes_:
            if len(str(value)) > 15:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroNfse' % {"value" : value} )
def validate_tsCnpj(self, value):
# Validate type tsCnpj, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) != 14:
warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCnpj' % {"value" : value.encode("utf-8")} )
def validate_tsInscricaoMunicipal(self, value):
# Validate type tsInscricaoMunicipal, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 15:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
def validate_tsCodigoMunicipioIbge(self, value):
# Validate type tsCodigoMunicipioIbge, a restriction on xsd:int.
if value is not None and Validate_simpletypes_:
            if len(str(value)) > 7:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoMunicipioIbge' % {"value" : value} )
def hasContent_(self):
if (
self.Numero is not None or
self.Cnpj is not None or
self.InscricaoMunicipal is not None or
self.CodigoMunicipio is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcIdentificacaoNfse', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoNfse')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoNfse')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoNfse', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoNfse'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoNfse', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Numero is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Numero>%s</Numero>%s' % (self.gds_format_integer(self.Numero, input_name='Numero'), eol_))
if self.Cnpj is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Cnpj>%s</Cnpj>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cnpj), input_name='Cnpj')), eol_))
if self.InscricaoMunicipal is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
if self.CodigoMunicipio is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<CodigoMunicipio>%s</CodigoMunicipio>%s' % (self.gds_format_integer(self.CodigoMunicipio, input_name='CodigoMunicipio'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Numero':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
if ival_ < 0:
raise_parse_error(child_, 'requires nonNegativeInteger')
ival_ = self.gds_validate_integer(ival_, node, 'Numero')
self.Numero = ival_
# validate type tsNumeroNfse
self.validate_tsNumeroNfse(self.Numero)
elif nodeName_ == 'Cnpj':
Cnpj_ = child_.text
Cnpj_ = self.gds_validate_string(Cnpj_, node, 'Cnpj')
self.Cnpj = Cnpj_
# validate type tsCnpj
self.validate_tsCnpj(self.Cnpj)
elif nodeName_ == 'InscricaoMunicipal':
InscricaoMunicipal_ = child_.text
InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
self.InscricaoMunicipal = InscricaoMunicipal_
# validate type tsInscricaoMunicipal
self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
elif nodeName_ == 'CodigoMunicipio':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'CodigoMunicipio')
self.CodigoMunicipio = ival_
# validate type tsCodigoMunicipioIbge
self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
# end class tcIdentificacaoNfse
class tcInfNfse(GeneratedsSuper):
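    """Binding for ``tcInfNfse``, the payload of an issued NFS-e (Nota
    Fiscal de Serviços eletrônica): an ``Id`` attribute, note number,
    verification code and issue date, the originating RPS reference and
    its issue date, tax flags, competence, an optional substituted
    NFS-e, other remarks, service data and credit amount, and the
    provider, taker, intermediary, issuing-body and construction
    blocks."""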
subclass = None
superclass = None
def __init__(self, Id=None, Numero=None, CodigoVerificacao=None, DataEmissao=None, IdentificacaoRps=None, DataEmissaoRps=None, NaturezaOperacao=None, RegimeEspecialTributacao=None, OptanteSimplesNacional=None, IncentivadorCultural=None, Competencia=None, NfseSubstituida=None, OutrasInformacoes=None, Servico=None, ValorCredito=None, PrestadorServico=None, TomadorServico=None, IntermediarioServico=None, OrgaoGerador=None, ConstrucaoCivil=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.Numero = Numero
self.validate_tsNumeroNfse(self.Numero)
self.CodigoVerificacao = CodigoVerificacao
self.validate_tsCodigoVerificacao(self.CodigoVerificacao)
if isinstance(DataEmissao, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataEmissao, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = DataEmissao
self.DataEmissao = initvalue_
self.IdentificacaoRps = IdentificacaoRps
if isinstance(DataEmissaoRps, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataEmissaoRps, '%Y-%m-%d').date()
else:
initvalue_ = DataEmissaoRps
self.DataEmissaoRps = initvalue_
self.NaturezaOperacao = NaturezaOperacao
self.validate_tsNaturezaOperacao(self.NaturezaOperacao)
self.RegimeEspecialTributacao = RegimeEspecialTributacao
self.validate_tsRegimeEspecialTributacao(self.RegimeEspecialTributacao)
self.OptanteSimplesNacional = OptanteSimplesNacional
self.validate_tsSimNao(self.OptanteSimplesNacional)
self.IncentivadorCultural = IncentivadorCultural
self.validate_tsSimNao(self.IncentivadorCultural)
self.Competencia = Competencia
self.NfseSubstituida = NfseSubstituida
self.validate_tsNumeroNfse(self.NfseSubstituida)
self.OutrasInformacoes = OutrasInformacoes
self.validate_tsOutrasInformacoes(self.OutrasInformacoes)
self.Servico = Servico
self.ValorCredito = ValorCredito
self.validate_tsValor(self.ValorCredito)
self.PrestadorServico = PrestadorServico
self.TomadorServico = TomadorServico
self.IntermediarioServico = IntermediarioServico
self.OrgaoGerador = OrgaoGerador
self.ConstrucaoCivil = ConstrucaoCivil
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcInfNfse)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcInfNfse.subclass:
return tcInfNfse.subclass(*args_, **kwargs_)
else:
return tcInfNfse(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Numero(self): return self.Numero
def set_Numero(self, Numero): self.Numero = Numero
def get_CodigoVerificacao(self): return self.CodigoVerificacao
def set_CodigoVerificacao(self, CodigoVerificacao): self.CodigoVerificacao = CodigoVerificacao
def get_DataEmissao(self): return self.DataEmissao
def set_DataEmissao(self, DataEmissao): self.DataEmissao = DataEmissao
def get_IdentificacaoRps(self): return self.IdentificacaoRps
def set_IdentificacaoRps(self, IdentificacaoRps): self.IdentificacaoRps = IdentificacaoRps
def get_DataEmissaoRps(self): return self.DataEmissaoRps
def set_DataEmissaoRps(self, DataEmissaoRps): self.DataEmissaoRps = DataEmissaoRps
def get_NaturezaOperacao(self): return self.NaturezaOperacao
def set_NaturezaOperacao(self, NaturezaOperacao): self.NaturezaOperacao = NaturezaOperacao
def get_RegimeEspecialTributacao(self): return self.RegimeEspecialTributacao
def set_RegimeEspecialTributacao(self, RegimeEspecialTributacao): self.RegimeEspecialTributacao = RegimeEspecialTributacao
def get_OptanteSimplesNacional(self): return self.OptanteSimplesNacional
def set_OptanteSimplesNacional(self, OptanteSimplesNacional): self.OptanteSimplesNacional = OptanteSimplesNacional
def get_IncentivadorCultural(self): return self.IncentivadorCultural
def set_IncentivadorCultural(self, IncentivadorCultural): self.IncentivadorCultural = IncentivadorCultural
def get_Competencia(self): return self.Competencia
def set_Competencia(self, Competencia): self.Competencia = Competencia
def get_NfseSubstituida(self): return self.NfseSubstituida
def set_NfseSubstituida(self, NfseSubstituida): self.NfseSubstituida = NfseSubstituida
def get_OutrasInformacoes(self): return self.OutrasInformacoes
def set_OutrasInformacoes(self, OutrasInformacoes): self.OutrasInformacoes = OutrasInformacoes
def get_Servico(self): return self.Servico
def set_Servico(self, Servico): self.Servico = Servico
def get_ValorCredito(self): return self.ValorCredito
def set_ValorCredito(self, ValorCredito): self.ValorCredito = ValorCredito
def get_PrestadorServico(self): return self.PrestadorServico
def set_PrestadorServico(self, PrestadorServico): self.PrestadorServico = PrestadorServico
def get_TomadorServico(self): return self.TomadorServico
def set_TomadorServico(self, TomadorServico): self.TomadorServico = TomadorServico
def get_IntermediarioServico(self): return self.IntermediarioServico
def set_IntermediarioServico(self, IntermediarioServico): self.IntermediarioServico = IntermediarioServico
def get_OrgaoGerador(self): return self.OrgaoGerador
def set_OrgaoGerador(self, OrgaoGerador): self.OrgaoGerador = OrgaoGerador
def get_ConstrucaoCivil(self): return self.ConstrucaoCivil
def set_ConstrucaoCivil(self, ConstrucaoCivil): self.ConstrucaoCivil = ConstrucaoCivil
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def validate_tsNumeroNfse(self, value):
# Validate type tsNumeroNfse, a restriction on xsd:nonNegativeInteger.
if value is not None and Validate_simpletypes_:
            if len(str(value)) > 15:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroNfse' % {"value" : value} )
def validate_tsCodigoVerificacao(self, value):
# Validate type tsCodigoVerificacao, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 9:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoVerificacao' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoVerificacao' % {"value" : value.encode("utf-8")} )
def validate_tsNaturezaOperacao(self, value):
# Validate type tsNaturezaOperacao, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tsNaturezaOperacao_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tsNaturezaOperacao_patterns_, ))
validate_tsNaturezaOperacao_patterns_ = [['^1$|^2$|^3$|^4$|^5$|^6$']]
def validate_tsRegimeEspecialTributacao(self, value):
# Validate type tsRegimeEspecialTributacao, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tsRegimeEspecialTributacao_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tsRegimeEspecialTributacao_patterns_, ))
validate_tsRegimeEspecialTributacao_patterns_ = [['^0$|^1$|^2$|^3$|^4$|^5$|^6$']]
def validate_tsSimNao(self, value):
# Validate type tsSimNao, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tsSimNao_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tsSimNao_patterns_, ))
validate_tsSimNao_patterns_ = [['^1$|^2$']]
def validate_tsOutrasInformacoes(self, value):
# Validate type tsOutrasInformacoes, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsOutrasInformacoes' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsOutrasInformacoes' % {"value" : value.encode("utf-8")} )
def validate_tsValor(self, value):
# Validate type tsValor, a restriction on xsd:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tsValor' % {"value" : value} )
if len(str(value)) >= 15:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsValor' % {"value" : value} )
def validate_tsIdTag(self, value):
# Validate type tsIdTag, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.Numero is not None or
self.CodigoVerificacao is not None or
self.DataEmissao is not None or
self.IdentificacaoRps is not None or
self.DataEmissaoRps is not None or
self.NaturezaOperacao is not None or
self.RegimeEspecialTributacao is not None or
self.OptanteSimplesNacional is not None or
self.IncentivadorCultural is not None or
self.Competencia is not None or
self.NfseSubstituida is not None or
self.OutrasInformacoes is not None or
self.Servico is not None or
self.ValorCredito is not None or
self.PrestadorServico is not None or
self.TomadorServico is not None or
self.IntermediarioServico is not None or
self.OrgaoGerador is not None or
self.ConstrucaoCivil is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcInfNfse', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfNfse')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfNfse')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfNfse', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfNfse'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='tcInfNfse', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Numero is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Numero>%s</Numero>%s' % (self.gds_format_integer(self.Numero, input_name='Numero'), eol_))
if self.CodigoVerificacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<CodigoVerificacao>%s</CodigoVerificacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoVerificacao), input_name='CodigoVerificacao')), eol_))
if self.DataEmissao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<DataEmissao>%s</DataEmissao>%s' % (self.gds_format_datetime(self.DataEmissao, input_name='DataEmissao'), eol_))
if self.IdentificacaoRps is not None:
self.IdentificacaoRps.export(outfile, level, namespace_, name_='IdentificacaoRps', pretty_print=pretty_print)
if self.DataEmissaoRps is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<DataEmissaoRps>%s</DataEmissaoRps>%s' % (self.gds_format_date(self.DataEmissaoRps, input_name='DataEmissaoRps'), eol_))
if self.NaturezaOperacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<NaturezaOperacao>%s</NaturezaOperacao>%s' % (self.gds_format_integer(self.NaturezaOperacao, input_name='NaturezaOperacao'), eol_))
if self.RegimeEspecialTributacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<RegimeEspecialTributacao>%s</RegimeEspecialTributacao>%s' % (self.gds_format_integer(self.RegimeEspecialTributacao, input_name='RegimeEspecialTributacao'), eol_))
if self.OptanteSimplesNacional is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<OptanteSimplesNacional>%s</OptanteSimplesNacional>%s' % (self.gds_format_integer(self.OptanteSimplesNacional, input_name='OptanteSimplesNacional'), eol_))
if self.IncentivadorCultural is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<IncentivadorCultural>%s</IncentivadorCultural>%s' % (self.gds_format_integer(self.IncentivadorCultural, input_name='IncentivadorCultural'), eol_))
if self.Competencia is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Competencia>%s</Competencia>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Competencia), input_name='Competencia')), eol_))
if self.NfseSubstituida is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<NfseSubstituida>%s</NfseSubstituida>%s' % (self.gds_format_integer(self.NfseSubstituida, input_name='NfseSubstituida'), eol_))
if self.OutrasInformacoes is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<OutrasInformacoes>%s</OutrasInformacoes>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.OutrasInformacoes), input_name='OutrasInformacoes')), eol_))
if self.Servico is not None:
self.Servico.export(outfile, level, namespace_, name_='Servico', pretty_print=pretty_print)
if self.ValorCredito is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ValorCredito>%s</ValorCredito>%s' % (self.gds_format_float(self.ValorCredito, input_name='ValorCredito'), eol_))
if self.PrestadorServico is not None:
self.PrestadorServico.export(outfile, level, namespace_, name_='PrestadorServico', pretty_print=pretty_print)
if self.TomadorServico is not None:
self.TomadorServico.export(outfile, level, namespace_, name_='TomadorServico', pretty_print=pretty_print)
if self.IntermediarioServico is not None:
self.IntermediarioServico.export(outfile, level, namespace_, name_='IntermediarioServico', pretty_print=pretty_print)
if self.OrgaoGerador is not None:
self.OrgaoGerador.export(outfile, level, namespace_, name_='OrgaoGerador', pretty_print=pretty_print)
if self.ConstrucaoCivil is not None:
self.ConstrucaoCivil.export(outfile, level, namespace_, name_='ConstrucaoCivil', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
self.validate_tsIdTag(self.Id) # validate type tsIdTag
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Numero':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
if ival_ < 0:
raise_parse_error(child_, 'requires nonNegativeInteger')
ival_ = self.gds_validate_integer(ival_, node, 'Numero')
self.Numero = ival_
# validate type tsNumeroNfse
self.validate_tsNumeroNfse(self.Numero)
elif nodeName_ == 'CodigoVerificacao':
CodigoVerificacao_ = child_.text
CodigoVerificacao_ = self.gds_validate_string(CodigoVerificacao_, node, 'CodigoVerificacao')
self.CodigoVerificacao = CodigoVerificacao_
# validate type tsCodigoVerificacao
self.validate_tsCodigoVerificacao(self.CodigoVerificacao)
elif nodeName_ == 'DataEmissao':
sval_ = child_.text
dval_ = self.gds_parse_datetime(sval_)
self.DataEmissao = dval_
elif nodeName_ == 'IdentificacaoRps':
obj_ = tcIdentificacaoRps.factory()
obj_.build(child_)
self.IdentificacaoRps = obj_
obj_.original_tagname_ = 'IdentificacaoRps'
elif nodeName_ == 'DataEmissaoRps':
sval_ = child_.text
dval_ = self.gds_parse_date(sval_)
self.DataEmissaoRps = dval_
elif nodeName_ == 'NaturezaOperacao':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NaturezaOperacao')
self.NaturezaOperacao = ival_
# validate type tsNaturezaOperacao
self.validate_tsNaturezaOperacao(self.NaturezaOperacao)
elif nodeName_ == 'RegimeEspecialTributacao':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'RegimeEspecialTributacao')
self.RegimeEspecialTributacao = ival_
# validate type tsRegimeEspecialTributacao
self.validate_tsRegimeEspecialTributacao(self.RegimeEspecialTributacao)
elif nodeName_ == 'OptanteSimplesNacional':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'OptanteSimplesNacional')
self.OptanteSimplesNacional = ival_
# validate type tsSimNao
self.validate_tsSimNao(self.OptanteSimplesNacional)
elif nodeName_ == 'IncentivadorCultural':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'IncentivadorCultural')
self.IncentivadorCultural = ival_
# validate type tsSimNao
self.validate_tsSimNao(self.IncentivadorCultural)
elif nodeName_ == 'Competencia':
Competencia_ = child_.text
Competencia_ = self.gds_validate_string(Competencia_, node, 'Competencia')
self.Competencia = Competencia_
elif nodeName_ == 'NfseSubstituida':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
if ival_ < 0:
raise_parse_error(child_, 'requires nonNegativeInteger')
ival_ = self.gds_validate_integer(ival_, node, 'NfseSubstituida')
self.NfseSubstituida = ival_
# validate type tsNumeroNfse
self.validate_tsNumeroNfse(self.NfseSubstituida)
elif nodeName_ == 'OutrasInformacoes':
OutrasInformacoes_ = child_.text
OutrasInformacoes_ = self.gds_validate_string(OutrasInformacoes_, node, 'OutrasInformacoes')
self.OutrasInformacoes = OutrasInformacoes_
# validate type tsOutrasInformacoes
self.validate_tsOutrasInformacoes(self.OutrasInformacoes)
elif nodeName_ == 'Servico':
obj_ = tcDadosServico.factory()
obj_.build(child_)
self.Servico = obj_
obj_.original_tagname_ = 'Servico'
elif nodeName_ == 'ValorCredito':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorCredito')
self.ValorCredito = fval_
# validate type tsValor
self.validate_tsValor(self.ValorCredito)
elif nodeName_ == 'PrestadorServico':
obj_ = tcDadosPrestador.factory()
obj_.build(child_)
self.PrestadorServico = obj_
obj_.original_tagname_ = 'PrestadorServico'
elif nodeName_ == 'TomadorServico':
obj_ = tcDadosTomador.factory()
obj_.build(child_)
self.TomadorServico = obj_
obj_.original_tagname_ = 'TomadorServico'
elif nodeName_ == 'IntermediarioServico':
obj_ = tcIdentificacaoIntermediarioServico.factory()
obj_.build(child_)
self.IntermediarioServico = obj_
obj_.original_tagname_ = 'IntermediarioServico'
elif nodeName_ == 'OrgaoGerador':
obj_ = tcIdentificacaoOrgaoGerador.factory()
obj_.build(child_)
self.OrgaoGerador = obj_
obj_.original_tagname_ = 'OrgaoGerador'
elif nodeName_ == 'ConstrucaoCivil':
obj_ = tcDadosConstrucaoCivil.factory()
obj_.build(child_)
self.ConstrucaoCivil = obj_
obj_.original_tagname_ = 'ConstrucaoCivil'
# end class tcInfNfse
class tcNfse(GeneratedsSuper):
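    """Binding for ``tcNfse``: an issued NFS-e, i.e. a ``tcInfNfse``
    payload plus an optional XML-DSig ``Signature`` element."""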
subclass = None
superclass = None
def __init__(self, InfNfse=None, Signature=None):
self.original_tagname_ = None
self.InfNfse = InfNfse
self.Signature = Signature
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcNfse)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcNfse.subclass:
return tcNfse.subclass(*args_, **kwargs_)
else:
return tcNfse(*args_, **kwargs_)
factory = staticmethod(factory)
def get_InfNfse(self): return self.InfNfse
def set_InfNfse(self, InfNfse): self.InfNfse = InfNfse
def get_Signature(self): return self.Signature
def set_Signature(self, Signature): self.Signature = Signature
def hasContent_(self):
if (
self.InfNfse is not None or
self.Signature is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcNfse', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcNfse')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcNfse')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcNfse', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcNfse'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcNfse', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.InfNfse is not None:
self.InfNfse.export(outfile, level, namespace_, name_='InfNfse', pretty_print=pretty_print)
if self.Signature is not None:
self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'InfNfse':
obj_ = tcInfNfse.factory()
obj_.build(child_)
self.InfNfse = obj_
obj_.original_tagname_ = 'InfNfse'
elif nodeName_ == 'Signature':
obj_ = SignatureType.factory()
obj_.build(child_)
self.Signature = obj_
obj_.original_tagname_ = 'Signature'
# end class tcNfse
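
# Illustrative parsing sketch (commented out): rebuilding a tcNfse from an
# already parsed ElementTree/lxml node. `xml_bytes` is an assumed
# placeholder holding a serialized <Nfse> element:
#
#     from lxml import etree
#     node = etree.fromstring(xml_bytes)
#     nfse = tcNfse.factory()
#     nfse.build(node)
#     numero = nfse.get_InfNfse().get_Numero()
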
class tcInfPedidoCancelamento(GeneratedsSuper):
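    """Binding for ``tcInfPedidoCancelamento``: the payload of an
    NFS-e cancellation request, holding an ``Id`` attribute, the target
    NFS-e identification and a cancellation-reason code (1 to 4
    characters)."""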
subclass = None
superclass = None
def __init__(self, Id=None, IdentificacaoNfse=None, CodigoCancelamento=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.IdentificacaoNfse = IdentificacaoNfse
self.CodigoCancelamento = CodigoCancelamento
self.validate_tsCodigoCancelamentoNfse(self.CodigoCancelamento)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcInfPedidoCancelamento)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcInfPedidoCancelamento.subclass:
return tcInfPedidoCancelamento.subclass(*args_, **kwargs_)
else:
return tcInfPedidoCancelamento(*args_, **kwargs_)
factory = staticmethod(factory)
def get_IdentificacaoNfse(self): return self.IdentificacaoNfse
def set_IdentificacaoNfse(self, IdentificacaoNfse): self.IdentificacaoNfse = IdentificacaoNfse
def get_CodigoCancelamento(self): return self.CodigoCancelamento
def set_CodigoCancelamento(self, CodigoCancelamento): self.CodigoCancelamento = CodigoCancelamento
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def validate_tsCodigoCancelamentoNfse(self, value):
# Validate type tsCodigoCancelamentoNfse, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 4:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoCancelamentoNfse' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoCancelamentoNfse' % {"value" : value.encode("utf-8")} )
def validate_tsIdTag(self, value):
# Validate type tsIdTag, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.IdentificacaoNfse is not None or
self.CodigoCancelamento is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcInfPedidoCancelamento', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfPedidoCancelamento')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfPedidoCancelamento')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfPedidoCancelamento', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfPedidoCancelamento'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='tcInfPedidoCancelamento', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.IdentificacaoNfse is not None:
self.IdentificacaoNfse.export(outfile, level, namespace_, name_='IdentificacaoNfse', pretty_print=pretty_print)
if self.CodigoCancelamento is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<CodigoCancelamento>%s</CodigoCancelamento>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoCancelamento), input_name='CodigoCancelamento')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
self.validate_tsIdTag(self.Id) # validate type tsIdTag
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'IdentificacaoNfse':
obj_ = tcIdentificacaoNfse.factory()
obj_.build(child_)
self.IdentificacaoNfse = obj_
obj_.original_tagname_ = 'IdentificacaoNfse'
elif nodeName_ == 'CodigoCancelamento':
CodigoCancelamento_ = child_.text
CodigoCancelamento_ = self.gds_validate_string(CodigoCancelamento_, node, 'CodigoCancelamento')
self.CodigoCancelamento = CodigoCancelamento_
# validate type tsCodigoCancelamentoNfse
self.validate_tsCodigoCancelamentoNfse(self.CodigoCancelamento)
# end class tcInfPedidoCancelamento
class tcPedidoCancelamento(GeneratedsSuper):
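    """Binding for ``tcPedidoCancelamento``: a cancellation request,
    i.e. a ``tcInfPedidoCancelamento`` payload plus an optional
    XML-DSig ``Signature`` element."""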
subclass = None
superclass = None
def __init__(self, InfPedidoCancelamento=None, Signature=None):
self.original_tagname_ = None
self.InfPedidoCancelamento = InfPedidoCancelamento
self.Signature = Signature
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcPedidoCancelamento)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcPedidoCancelamento.subclass:
return tcPedidoCancelamento.subclass(*args_, **kwargs_)
else:
return tcPedidoCancelamento(*args_, **kwargs_)
factory = staticmethod(factory)
def get_InfPedidoCancelamento(self): return self.InfPedidoCancelamento
def set_InfPedidoCancelamento(self, InfPedidoCancelamento): self.InfPedidoCancelamento = InfPedidoCancelamento
def get_Signature(self): return self.Signature
def set_Signature(self, Signature): self.Signature = Signature
def hasContent_(self):
if (
self.InfPedidoCancelamento is not None or
self.Signature is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcPedidoCancelamento', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcPedidoCancelamento')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcPedidoCancelamento')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcPedidoCancelamento', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcPedidoCancelamento'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcPedidoCancelamento', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.InfPedidoCancelamento is not None:
self.InfPedidoCancelamento.export(outfile, level, namespace_, name_='InfPedidoCancelamento', pretty_print=pretty_print)
if self.Signature is not None:
self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'InfPedidoCancelamento':
obj_ = tcInfPedidoCancelamento.factory()
obj_.build(child_)
self.InfPedidoCancelamento = obj_
obj_.original_tagname_ = 'InfPedidoCancelamento'
elif nodeName_ == 'Signature':
obj_ = SignatureType.factory()
obj_.build(child_)
self.Signature = obj_
obj_.original_tagname_ = 'Signature'
# end class tcPedidoCancelamento
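
# Illustrative usage sketch (not part of the generated bindings): building a
# cancellation request and serializing it. Assumes Python 3 (text-mode
# buffer) and the generateDS runtime helpers defined elsewhere in this
# module; the Id and CodigoCancelamento values are made up.
def _example_pedido_cancelamento():
    import io
    inf = tcInfPedidoCancelamento.factory()
    inf.Id = 'C1'                 # hypothetical tag id (tsIdTag)
    inf.CodigoCancelamento = '1'  # hypothetical cancellation code
    pedido = tcPedidoCancelamento.factory()
    pedido.set_InfPedidoCancelamento(inf)
    buf = io.StringIO()
    pedido.export(buf, 0, name_='Pedido')
    return buf.getvalue()
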
class tcInfConfirmacaoCancelamento(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Sucesso=None, DataHora=None):
self.original_tagname_ = None
self.Sucesso = Sucesso
if isinstance(DataHora, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataHora, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = DataHora
self.DataHora = initvalue_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcInfConfirmacaoCancelamento)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcInfConfirmacaoCancelamento.subclass:
return tcInfConfirmacaoCancelamento.subclass(*args_, **kwargs_)
else:
return tcInfConfirmacaoCancelamento(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Sucesso(self): return self.Sucesso
def set_Sucesso(self, Sucesso): self.Sucesso = Sucesso
def get_DataHora(self): return self.DataHora
def set_DataHora(self, DataHora): self.DataHora = DataHora
def hasContent_(self):
if (
self.Sucesso is not None or
self.DataHora is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcInfConfirmacaoCancelamento', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfConfirmacaoCancelamento')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfConfirmacaoCancelamento')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfConfirmacaoCancelamento', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfConfirmacaoCancelamento'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcInfConfirmacaoCancelamento', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Sucesso is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Sucesso>%s</Sucesso>%s' % (self.gds_format_boolean(self.Sucesso, input_name='Sucesso'), eol_))
if self.DataHora is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<DataHora>%s</DataHora>%s' % (self.gds_format_datetime(self.DataHora, input_name='DataHora'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Sucesso':
sval_ = child_.text
if sval_ in ('true', '1'):
ival_ = True
elif sval_ in ('false', '0'):
ival_ = False
else:
raise_parse_error(child_, 'requires boolean')
ival_ = self.gds_validate_boolean(ival_, node, 'Sucesso')
self.Sucesso = ival_
elif nodeName_ == 'DataHora':
sval_ = child_.text
dval_ = self.gds_parse_datetime(sval_)
self.DataHora = dval_
# end class tcInfConfirmacaoCancelamento
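
# Illustrative sketch: tcInfConfirmacaoCancelamento accepts DataHora either as
# a datetime.datetime or as an ISO string ('%Y-%m-%dT%H:%M:%S'), which the
# constructor parses with strptime. Values below are made up.
def _example_confirmacao_datahora():
    import datetime
    a = tcInfConfirmacaoCancelamento(Sucesso=True,
                                     DataHora='2020-01-31T10:30:00')
    b = tcInfConfirmacaoCancelamento(Sucesso=True,
                                     DataHora=datetime.datetime(2020, 1, 31, 10, 30))
    return a.DataHora == b.DataHora  # True: both end up as datetime objects
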
class tcConfirmacaoCancelamento(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, Pedido=None, InfConfirmacaoCancelamento=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.Pedido = Pedido
self.InfConfirmacaoCancelamento = InfConfirmacaoCancelamento
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcConfirmacaoCancelamento)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcConfirmacaoCancelamento.subclass:
return tcConfirmacaoCancelamento.subclass(*args_, **kwargs_)
else:
return tcConfirmacaoCancelamento(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Pedido(self): return self.Pedido
def set_Pedido(self, Pedido): self.Pedido = Pedido
def get_InfConfirmacaoCancelamento(self): return self.InfConfirmacaoCancelamento
def set_InfConfirmacaoCancelamento(self, InfConfirmacaoCancelamento): self.InfConfirmacaoCancelamento = InfConfirmacaoCancelamento
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def validate_tsIdTag(self, value):
# Validate type tsIdTag, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.Pedido is not None or
self.InfConfirmacaoCancelamento is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcConfirmacaoCancelamento', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcConfirmacaoCancelamento')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcConfirmacaoCancelamento')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcConfirmacaoCancelamento', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcConfirmacaoCancelamento'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='tcConfirmacaoCancelamento', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Pedido is not None:
self.Pedido.export(outfile, level, namespace_, name_='Pedido', pretty_print=pretty_print)
if self.InfConfirmacaoCancelamento is not None:
self.InfConfirmacaoCancelamento.export(outfile, level, namespace_, name_='InfConfirmacaoCancelamento', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
self.validate_tsIdTag(self.Id) # validate type tsIdTag
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Pedido':
obj_ = tcPedidoCancelamento.factory()
obj_.build(child_)
self.Pedido = obj_
obj_.original_tagname_ = 'Pedido'
elif nodeName_ == 'InfConfirmacaoCancelamento':
obj_ = tcInfConfirmacaoCancelamento.factory()
obj_.build(child_)
self.InfConfirmacaoCancelamento = obj_
obj_.original_tagname_ = 'InfConfirmacaoCancelamento'
# end class tcConfirmacaoCancelamento
class tcCancelamentoNfse(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Confirmacao=None, Signature=None):
self.original_tagname_ = None
self.Confirmacao = Confirmacao
self.Signature = Signature
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcCancelamentoNfse)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcCancelamentoNfse.subclass:
return tcCancelamentoNfse.subclass(*args_, **kwargs_)
else:
return tcCancelamentoNfse(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Confirmacao(self): return self.Confirmacao
def set_Confirmacao(self, Confirmacao): self.Confirmacao = Confirmacao
def get_Signature(self): return self.Signature
def set_Signature(self, Signature): self.Signature = Signature
def hasContent_(self):
if (
self.Confirmacao is not None or
self.Signature is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcCancelamentoNfse', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcCancelamentoNfse')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcCancelamentoNfse')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcCancelamentoNfse', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcCancelamentoNfse'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcCancelamentoNfse', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Confirmacao is not None:
self.Confirmacao.export(outfile, level, namespace_, name_='Confirmacao', pretty_print=pretty_print)
if self.Signature is not None:
self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Confirmacao':
obj_ = tcConfirmacaoCancelamento.factory()
obj_.build(child_)
self.Confirmacao = obj_
obj_.original_tagname_ = 'Confirmacao'
elif nodeName_ == 'Signature':
obj_ = SignatureType.factory()
obj_.build(child_)
self.Signature = obj_
obj_.original_tagname_ = 'Signature'
# end class tcCancelamentoNfse
class tcInfSubstituicaoNfse(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, NfseSubstituidora=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.NfseSubstituidora = NfseSubstituidora
self.validate_tsNumeroNfse(self.NfseSubstituidora)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcInfSubstituicaoNfse)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcInfSubstituicaoNfse.subclass:
return tcInfSubstituicaoNfse.subclass(*args_, **kwargs_)
else:
return tcInfSubstituicaoNfse(*args_, **kwargs_)
factory = staticmethod(factory)
def get_NfseSubstituidora(self): return self.NfseSubstituidora
def set_NfseSubstituidora(self, NfseSubstituidora): self.NfseSubstituidora = NfseSubstituidora
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def validate_tsNumeroNfse(self, value):
# Validate type tsNumeroNfse, a restriction on xsd:nonNegativeInteger.
if value is not None and Validate_simpletypes_:
if len(str(value)) >= 15:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroNfse' % {"value" : value} )
def validate_tsIdTag(self, value):
# Validate type tsIdTag, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.NfseSubstituidora is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcInfSubstituicaoNfse', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfSubstituicaoNfse')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfSubstituicaoNfse')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfSubstituicaoNfse', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfSubstituicaoNfse'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='tcInfSubstituicaoNfse', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.NfseSubstituidora is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<NfseSubstituidora>%s</NfseSubstituidora>%s' % (self.gds_format_integer(self.NfseSubstituidora, input_name='NfseSubstituidora'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
self.validate_tsIdTag(self.Id) # validate type tsIdTag
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'NfseSubstituidora':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
if ival_ < 0:
raise_parse_error(child_, 'requires nonNegativeInteger')
ival_ = self.gds_validate_integer(ival_, node, 'NfseSubstituidora')
self.NfseSubstituidora = ival_
# validate type tsNumeroNfse
self.validate_tsNumeroNfse(self.NfseSubstituidora)
# end class tcInfSubstituicaoNfse
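
# Illustrative sketch: the validate_* helpers only warn (via the module's
# warnings_ alias) and never raise, and only when the module-level
# Validate_simpletypes_ flag is enabled (the generated default). An over-long
# NfseSubstituidora therefore still round-trips; it just warns.
def _example_validation_warns_only():
    import warnings
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        obj = tcInfSubstituicaoNfse(NfseSubstituidora=10**15)  # 16 digits
    return obj.NfseSubstituidora, len(caught)
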
class tcSubstituicaoNfse(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, SubstituicaoNfse=None, Signature=None):
self.original_tagname_ = None
self.SubstituicaoNfse = SubstituicaoNfse
if Signature is None:
self.Signature = []
else:
self.Signature = Signature
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcSubstituicaoNfse)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcSubstituicaoNfse.subclass:
return tcSubstituicaoNfse.subclass(*args_, **kwargs_)
else:
return tcSubstituicaoNfse(*args_, **kwargs_)
factory = staticmethod(factory)
def get_SubstituicaoNfse(self): return self.SubstituicaoNfse
def set_SubstituicaoNfse(self, SubstituicaoNfse): self.SubstituicaoNfse = SubstituicaoNfse
def get_Signature(self): return self.Signature
def set_Signature(self, Signature): self.Signature = Signature
def add_Signature(self, value): self.Signature.append(value)
def insert_Signature_at(self, index, value): self.Signature.insert(index, value)
def replace_Signature_at(self, index, value): self.Signature[index] = value
def hasContent_(self):
if (
self.SubstituicaoNfse is not None or
self.Signature
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcSubstituicaoNfse', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcSubstituicaoNfse')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcSubstituicaoNfse')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcSubstituicaoNfse', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcSubstituicaoNfse'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcSubstituicaoNfse', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.SubstituicaoNfse is not None:
self.SubstituicaoNfse.export(outfile, level, namespace_, name_='SubstituicaoNfse', pretty_print=pretty_print)
for Signature_ in self.Signature:
Signature_.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'SubstituicaoNfse':
obj_ = tcInfSubstituicaoNfse.factory()
obj_.build(child_)
self.SubstituicaoNfse = obj_
obj_.original_tagname_ = 'SubstituicaoNfse'
elif nodeName_ == 'Signature':
obj_ = SignatureType.factory()
obj_.build(child_)
self.Signature.append(obj_)
obj_.original_tagname_ = 'Signature'
# end class tcSubstituicaoNfse
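
# Note: unlike tcPedidoCancelamento and tcCancelamentoNfse, where Signature is
# a single optional element, tcSubstituicaoNfse keeps a *list* of Signature
# children (add_Signature/insert_Signature_at), which suggests maxOccurs > 1
# in the underlying schema.
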
class tcCompNfse(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Nfse=None, NfseCancelamento=None, NfseSubstituicao=None):
self.original_tagname_ = None
self.Nfse = Nfse
self.NfseCancelamento = NfseCancelamento
self.NfseSubstituicao = NfseSubstituicao
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcCompNfse)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcCompNfse.subclass:
return tcCompNfse.subclass(*args_, **kwargs_)
else:
return tcCompNfse(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Nfse(self): return self.Nfse
def set_Nfse(self, Nfse): self.Nfse = Nfse
def get_NfseCancelamento(self): return self.NfseCancelamento
def set_NfseCancelamento(self, NfseCancelamento): self.NfseCancelamento = NfseCancelamento
def get_NfseSubstituicao(self): return self.NfseSubstituicao
def set_NfseSubstituicao(self, NfseSubstituicao): self.NfseSubstituicao = NfseSubstituicao
def hasContent_(self):
if (
self.Nfse is not None or
self.NfseCancelamento is not None or
self.NfseSubstituicao is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcCompNfse', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcCompNfse')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcCompNfse')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcCompNfse', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcCompNfse'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcCompNfse', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Nfse is not None:
self.Nfse.export(outfile, level, namespace_, name_='Nfse', pretty_print=pretty_print)
if self.NfseCancelamento is not None:
self.NfseCancelamento.export(outfile, level, namespace_, name_='NfseCancelamento', pretty_print=pretty_print)
if self.NfseSubstituicao is not None:
self.NfseSubstituicao.export(outfile, level, namespace_, name_='NfseSubstituicao', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Nfse':
obj_ = tcNfse.factory()
obj_.build(child_)
self.Nfse = obj_
obj_.original_tagname_ = 'Nfse'
elif nodeName_ == 'NfseCancelamento':
obj_ = tcCancelamentoNfse.factory()
obj_.build(child_)
self.NfseCancelamento = obj_
obj_.original_tagname_ = 'NfseCancelamento'
elif nodeName_ == 'NfseSubstituicao':
obj_ = tcSubstituicaoNfse.factory()
obj_.build(child_)
self.NfseSubstituicao = obj_
obj_.original_tagname_ = 'NfseSubstituicao'
# end class tcCompNfse
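
# Illustrative sketch: build() consumes an ElementTree node, dispatching each
# child through buildChildren() by its namespace-stripped tag name (via
# Tag_pattern_, defined elsewhere in this module). The XML below is a minimal
# made-up fragment, not a schema-valid CompNfse document.
def _example_build_from_etree():
    from xml.etree import ElementTree as etree
    node = etree.fromstring(
        '<CompNfse><NfseCancelamento></NfseCancelamento></CompNfse>')
    comp = tcCompNfse.factory().build(node)
    return comp.NfseCancelamento is not None  # True
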
class ListaMensagemRetorno(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, MensagemRetorno=None):
self.original_tagname_ = None
if MensagemRetorno is None:
self.MensagemRetorno = []
else:
self.MensagemRetorno = MensagemRetorno
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, ListaMensagemRetorno)
if subclass is not None:
return subclass(*args_, **kwargs_)
if ListaMensagemRetorno.subclass:
return ListaMensagemRetorno.subclass(*args_, **kwargs_)
else:
return ListaMensagemRetorno(*args_, **kwargs_)
factory = staticmethod(factory)
def get_MensagemRetorno(self): return self.MensagemRetorno
def set_MensagemRetorno(self, MensagemRetorno): self.MensagemRetorno = MensagemRetorno
def add_MensagemRetorno(self, value): self.MensagemRetorno.append(value)
def insert_MensagemRetorno_at(self, index, value): self.MensagemRetorno.insert(index, value)
def replace_MensagemRetorno_at(self, index, value): self.MensagemRetorno[index] = value
def hasContent_(self):
if (
self.MensagemRetorno
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='ListaMensagemRetorno', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListaMensagemRetorno')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ListaMensagemRetorno')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='ListaMensagemRetorno', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ListaMensagemRetorno'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='ListaMensagemRetorno', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for MensagemRetorno_ in self.MensagemRetorno:
MensagemRetorno_.export(outfile, level, namespace_, name_='MensagemRetorno', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'MensagemRetorno':
obj_ = tcMensagemRetorno.factory()
obj_.build(child_)
self.MensagemRetorno.append(obj_)
obj_.original_tagname_ = 'MensagemRetorno'
# end class ListaMensagemRetorno
class tcMensagemRetorno(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Codigo=None, Mensagem=None, Correcao=None):
self.original_tagname_ = None
self.Codigo = Codigo
self.validate_tsCodigoMensagemAlerta(self.Codigo)
self.Mensagem = Mensagem
self.validate_tsDescricaoMensagemAlerta(self.Mensagem)
self.Correcao = Correcao
self.validate_tsDescricaoMensagemAlerta(self.Correcao)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcMensagemRetorno)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcMensagemRetorno.subclass:
return tcMensagemRetorno.subclass(*args_, **kwargs_)
else:
return tcMensagemRetorno(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Codigo(self): return self.Codigo
def set_Codigo(self, Codigo): self.Codigo = Codigo
def get_Mensagem(self): return self.Mensagem
def set_Mensagem(self, Mensagem): self.Mensagem = Mensagem
def get_Correcao(self): return self.Correcao
def set_Correcao(self, Correcao): self.Correcao = Correcao
def validate_tsCodigoMensagemAlerta(self, value):
# Validate type tsCodigoMensagemAlerta, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 4:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoMensagemAlerta' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoMensagemAlerta' % {"value" : value.encode("utf-8")} )
def validate_tsDescricaoMensagemAlerta(self, value):
# Validate type tsDescricaoMensagemAlerta, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 200:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsDescricaoMensagemAlerta' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsDescricaoMensagemAlerta' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.Codigo is not None or
self.Mensagem is not None or
self.Correcao is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcMensagemRetorno', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcMensagemRetorno')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcMensagemRetorno')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcMensagemRetorno', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcMensagemRetorno'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcMensagemRetorno', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Codigo is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Codigo>%s</Codigo>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Codigo), input_name='Codigo')), eol_))
if self.Mensagem is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Mensagem>%s</Mensagem>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Mensagem), input_name='Mensagem')), eol_))
if self.Correcao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Correcao>%s</Correcao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Correcao), input_name='Correcao')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Codigo':
Codigo_ = child_.text
Codigo_ = self.gds_validate_string(Codigo_, node, 'Codigo')
self.Codigo = Codigo_
# validate type tsCodigoMensagemAlerta
self.validate_tsCodigoMensagemAlerta(self.Codigo)
elif nodeName_ == 'Mensagem':
Mensagem_ = child_.text
Mensagem_ = self.gds_validate_string(Mensagem_, node, 'Mensagem')
self.Mensagem = Mensagem_
# validate type tsDescricaoMensagemAlerta
self.validate_tsDescricaoMensagemAlerta(self.Mensagem)
elif nodeName_ == 'Correcao':
Correcao_ = child_.text
Correcao_ = self.gds_validate_string(Correcao_, node, 'Correcao')
self.Correcao = Correcao_
# validate type tsDescricaoMensagemAlerta
self.validate_tsDescricaoMensagemAlerta(self.Correcao)
# end class tcMensagemRetorno
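
# Illustrative sketch: error and alert payloads arrive as a
# ListaMensagemRetorno whose MensagemRetorno member is a plain list of
# tcMensagemRetorno items. Field values below are made up.
def _example_collect_messages():
    lista = ListaMensagemRetorno()
    lista.add_MensagemRetorno(
        tcMensagemRetorno(Codigo='E160', Mensagem='Arquivo invalido.',
                          Correcao='Reenviar o arquivo corrigido.'))
    return [(m.get_Codigo(), m.get_Mensagem())
            for m in lista.get_MensagemRetorno()]
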
class tcMensagemRetornoLote(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, IdentificacaoRps=None, Codigo=None, Mensagem=None):
self.original_tagname_ = None
self.IdentificacaoRps = IdentificacaoRps
self.Codigo = Codigo
self.validate_tsCodigoMensagemAlerta(self.Codigo)
self.Mensagem = Mensagem
self.validate_tsDescricaoMensagemAlerta(self.Mensagem)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcMensagemRetornoLote)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcMensagemRetornoLote.subclass:
return tcMensagemRetornoLote.subclass(*args_, **kwargs_)
else:
return tcMensagemRetornoLote(*args_, **kwargs_)
factory = staticmethod(factory)
def get_IdentificacaoRps(self): return self.IdentificacaoRps
def set_IdentificacaoRps(self, IdentificacaoRps): self.IdentificacaoRps = IdentificacaoRps
def get_Codigo(self): return self.Codigo
def set_Codigo(self, Codigo): self.Codigo = Codigo
def get_Mensagem(self): return self.Mensagem
def set_Mensagem(self, Mensagem): self.Mensagem = Mensagem
def validate_tsCodigoMensagemAlerta(self, value):
# Validate type tsCodigoMensagemAlerta, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 4:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoMensagemAlerta' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoMensagemAlerta' % {"value" : value.encode("utf-8")} )
def validate_tsDescricaoMensagemAlerta(self, value):
# Validate type tsDescricaoMensagemAlerta, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 200:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsDescricaoMensagemAlerta' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsDescricaoMensagemAlerta' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.IdentificacaoRps is not None or
self.Codigo is not None or
self.Mensagem is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcMensagemRetornoLote', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcMensagemRetornoLote')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcMensagemRetornoLote')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcMensagemRetornoLote', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcMensagemRetornoLote'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcMensagemRetornoLote', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.IdentificacaoRps is not None:
self.IdentificacaoRps.export(outfile, level, namespace_, name_='IdentificacaoRps', pretty_print=pretty_print)
if self.Codigo is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Codigo>%s</Codigo>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Codigo), input_name='Codigo')), eol_))
if self.Mensagem is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Mensagem>%s</Mensagem>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Mensagem), input_name='Mensagem')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'IdentificacaoRps':
obj_ = tcIdentificacaoRps.factory()
obj_.build(child_)
self.IdentificacaoRps = obj_
obj_.original_tagname_ = 'IdentificacaoRps'
elif nodeName_ == 'Codigo':
Codigo_ = child_.text
Codigo_ = self.gds_validate_string(Codigo_, node, 'Codigo')
self.Codigo = Codigo_
# validate type tsCodigoMensagemAlerta
self.validate_tsCodigoMensagemAlerta(self.Codigo)
elif nodeName_ == 'Mensagem':
Mensagem_ = child_.text
Mensagem_ = self.gds_validate_string(Mensagem_, node, 'Mensagem')
self.Mensagem = Mensagem_
# validate type tsDescricaoMensagemAlerta
self.validate_tsDescricaoMensagemAlerta(self.Mensagem)
# end class tcMensagemRetornoLote
class tcLoteRps(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, NumeroLote=None, Cnpj=None, InscricaoMunicipal=None, QuantidadeRps=None, ListaRps=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.NumeroLote = NumeroLote
self.validate_tsNumeroLote(self.NumeroLote)
self.Cnpj = Cnpj
self.validate_tsCnpj(self.Cnpj)
self.InscricaoMunicipal = InscricaoMunicipal
self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
self.QuantidadeRps = QuantidadeRps
self.validate_tsQuantidadeRps(self.QuantidadeRps)
self.ListaRps = ListaRps
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcLoteRps)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcLoteRps.subclass:
return tcLoteRps.subclass(*args_, **kwargs_)
else:
return tcLoteRps(*args_, **kwargs_)
factory = staticmethod(factory)
def get_NumeroLote(self): return self.NumeroLote
def set_NumeroLote(self, NumeroLote): self.NumeroLote = NumeroLote
def get_Cnpj(self): return self.Cnpj
def set_Cnpj(self, Cnpj): self.Cnpj = Cnpj
def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
def get_QuantidadeRps(self): return self.QuantidadeRps
def set_QuantidadeRps(self, QuantidadeRps): self.QuantidadeRps = QuantidadeRps
def get_ListaRps(self): return self.ListaRps
def set_ListaRps(self, ListaRps): self.ListaRps = ListaRps
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def validate_tsNumeroLote(self, value):
# Validate type tsNumeroLote, a restriction on xsd:nonNegativeInteger.
if value is not None and Validate_simpletypes_:
if len(str(value)) >= 15:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroLote' % {"value" : value} )
def validate_tsCnpj(self, value):
# Validate type tsCnpj, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) != 14:
warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCnpj' % {"value" : value.encode("utf-8")} )
def validate_tsInscricaoMunicipal(self, value):
# Validate type tsInscricaoMunicipal, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 15:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
def validate_tsQuantidadeRps(self, value):
# Validate type tsQuantidadeRps, a restriction on xsd:int.
if value is not None and Validate_simpletypes_:
if len(str(value)) >= 4:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsQuantidadeRps' % {"value" : value} )
def validate_tsIdTag(self, value):
# Validate type tsIdTag, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.NumeroLote is not None or
self.Cnpj is not None or
self.InscricaoMunicipal is not None or
self.QuantidadeRps is not None or
self.ListaRps is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcLoteRps', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcLoteRps')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcLoteRps')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcLoteRps', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcLoteRps'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='tcLoteRps', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.NumeroLote is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<NumeroLote>%s</NumeroLote>%s' % (self.gds_format_integer(self.NumeroLote, input_name='NumeroLote'), eol_))
if self.Cnpj is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Cnpj>%s</Cnpj>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cnpj), input_name='Cnpj')), eol_))
if self.InscricaoMunicipal is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
if self.QuantidadeRps is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<QuantidadeRps>%s</QuantidadeRps>%s' % (self.gds_format_integer(self.QuantidadeRps, input_name='QuantidadeRps'), eol_))
if self.ListaRps is not None:
self.ListaRps.export(outfile, level, namespace_, name_='ListaRps', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
self.validate_tsIdTag(self.Id) # validate type tsIdTag
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'NumeroLote':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
if ival_ < 0:
raise_parse_error(child_, 'requires nonNegativeInteger')
ival_ = self.gds_validate_integer(ival_, node, 'NumeroLote')
self.NumeroLote = ival_
# validate type tsNumeroLote
self.validate_tsNumeroLote(self.NumeroLote)
elif nodeName_ == 'Cnpj':
Cnpj_ = child_.text
Cnpj_ = self.gds_validate_string(Cnpj_, node, 'Cnpj')
self.Cnpj = Cnpj_
# validate type tsCnpj
self.validate_tsCnpj(self.Cnpj)
elif nodeName_ == 'InscricaoMunicipal':
InscricaoMunicipal_ = child_.text
InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
self.InscricaoMunicipal = InscricaoMunicipal_
# validate type tsInscricaoMunicipal
self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
elif nodeName_ == 'QuantidadeRps':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'QuantidadeRps')
self.QuantidadeRps = ival_
# validate type tsQuantidadeRps
self.validate_tsQuantidadeRps(self.QuantidadeRps)
elif nodeName_ == 'ListaRps':
obj_ = ListaRpsType.factory()
obj_.build(child_)
self.ListaRps = obj_
obj_.original_tagname_ = 'ListaRps'
# end class tcLoteRps
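
# Illustrative sketch: parsing a minimal, made-up LoteRps fragment. The Id
# attribute goes through buildAttributes()/validate_tsIdTag() and the integer
# children through the nonNegativeInteger checks in buildChildren().
def _example_parse_lote():
    from xml.etree import ElementTree as etree
    node = etree.fromstring(
        '<LoteRps Id="L1">'
        '<NumeroLote>7</NumeroLote>'
        '<Cnpj>12345678000199</Cnpj>'
        '<QuantidadeRps>1</QuantidadeRps>'
        '</LoteRps>')
    lote = tcLoteRps.factory().build(node)
    return lote.Id, lote.NumeroLote, lote.Cnpj, lote.QuantidadeRps
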
class SignatureType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, SignedInfo=None, SignatureValue=None, KeyInfo=None, Object=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.SignedInfo = SignedInfo
self.SignatureValue = SignatureValue
self.KeyInfo = KeyInfo
if Object is None:
self.Object = []
else:
self.Object = Object
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, SignatureType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if SignatureType.subclass:
return SignatureType.subclass(*args_, **kwargs_)
else:
return SignatureType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_SignedInfo(self): return self.SignedInfo
def set_SignedInfo(self, SignedInfo): self.SignedInfo = SignedInfo
def get_SignatureValue(self): return self.SignatureValue
def set_SignatureValue(self, SignatureValue): self.SignatureValue = SignatureValue
def get_KeyInfo(self): return self.KeyInfo
def set_KeyInfo(self, KeyInfo): self.KeyInfo = KeyInfo
def get_Object(self): return self.Object
def set_Object(self, Object): self.Object = Object
def add_Object(self, value): self.Object.append(value)
def insert_Object_at(self, index, value): self.Object.insert(index, value)
def replace_Object_at(self, index, value): self.Object[index] = value
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def hasContent_(self):
if (
self.SignedInfo is not None or
self.SignatureValue is not None or
self.KeyInfo is not None or
self.Object
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='SignatureType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='SignatureType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignatureType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='SignatureType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.SignedInfo is not None:
self.SignedInfo.export(outfile, level, namespace_='ds:', name_='SignedInfo', pretty_print=pretty_print)
if self.SignatureValue is not None:
self.SignatureValue.export(outfile, level, namespace_='ds:', name_='SignatureValue', pretty_print=pretty_print)
if self.KeyInfo is not None:
self.KeyInfo.export(outfile, level, namespace_='ds:', name_='KeyInfo', pretty_print=pretty_print)
for Object_ in self.Object:
Object_.export(outfile, level, namespace_='ds:', name_='Object', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'SignedInfo':
obj_ = SignedInfoType.factory()
obj_.build(child_)
self.SignedInfo = obj_
obj_.original_tagname_ = 'SignedInfo'
elif nodeName_ == 'SignatureValue':
obj_ = SignatureValueType.factory()
obj_.build(child_)
self.SignatureValue = obj_
obj_.original_tagname_ = 'SignatureValue'
elif nodeName_ == 'KeyInfo':
obj_ = KeyInfoType.factory()
obj_.build(child_)
self.KeyInfo = obj_
obj_.original_tagname_ = 'KeyInfo'
elif nodeName_ == 'Object':
obj_ = ObjectType.factory()
obj_.build(child_)
self.Object.append(obj_)
obj_.original_tagname_ = 'Object'
# end class SignatureType
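
# Note on prefixes: parent classes in this module export Signature with the
# 'dsig:' prefix (see tcPedidoCancelamento.exportChildren), while
# SignatureType emits its SignedInfo/SignatureValue/KeyInfo/Object children
# with 'ds:'. Both prefixes must be bound to the XML Digital Signature
# namespace via namespacedef_ for the serialized output to validate.
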
class SignatureValueType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, valueOf_=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, SignatureValueType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if SignatureValueType.subclass:
return SignatureValueType.subclass(*args_, **kwargs_)
else:
return SignatureValueType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='SignatureValueType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureValueType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureValueType')
if self.hasContent_():
outfile.write('>')
outfile.write(self.convert_unicode(self.valueOf_))
self.exportChildren(outfile, level + 1, namespace_='', name_='SignatureValueType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignatureValueType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='SignatureValueType', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class SignatureValueType
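
# Note: SignatureValueType is a simple-content type; the base64 signature text
# lives in valueOf_ (collected by get_all_text_() during build and written
# back verbatim by export() via convert_unicode()).
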
class SignedInfoType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, CanonicalizationMethod=None, SignatureMethod=None, Reference=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.CanonicalizationMethod = CanonicalizationMethod
self.SignatureMethod = SignatureMethod
if Reference is None:
self.Reference = []
else:
self.Reference = Reference
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, SignedInfoType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if SignedInfoType.subclass:
return SignedInfoType.subclass(*args_, **kwargs_)
else:
return SignedInfoType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_CanonicalizationMethod(self): return self.CanonicalizationMethod
def set_CanonicalizationMethod(self, CanonicalizationMethod): self.CanonicalizationMethod = CanonicalizationMethod
def get_SignatureMethod(self): return self.SignatureMethod
def set_SignatureMethod(self, SignatureMethod): self.SignatureMethod = SignatureMethod
def get_Reference(self): return self.Reference
def set_Reference(self, Reference): self.Reference = Reference
def add_Reference(self, value): self.Reference.append(value)
def insert_Reference_at(self, index, value): self.Reference.insert(index, value)
def replace_Reference_at(self, index, value): self.Reference[index] = value
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def hasContent_(self):
if (
self.CanonicalizationMethod is not None or
self.SignatureMethod is not None or
self.Reference
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='SignedInfoType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignedInfoType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignedInfoType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='SignedInfoType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignedInfoType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='SignedInfoType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.CanonicalizationMethod is not None:
self.CanonicalizationMethod.export(outfile, level, namespace_='ds:', name_='CanonicalizationMethod', pretty_print=pretty_print)
if self.SignatureMethod is not None:
self.SignatureMethod.export(outfile, level, namespace_='ds:', name_='SignatureMethod', pretty_print=pretty_print)
for Reference_ in self.Reference:
Reference_.export(outfile, level, namespace_='ds:', name_='Reference', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'CanonicalizationMethod':
obj_ = CanonicalizationMethodType.factory()
obj_.build(child_)
self.CanonicalizationMethod = obj_
obj_.original_tagname_ = 'CanonicalizationMethod'
elif nodeName_ == 'SignatureMethod':
obj_ = SignatureMethodType.factory()
obj_.build(child_)
self.SignatureMethod = obj_
obj_.original_tagname_ = 'SignatureMethod'
elif nodeName_ == 'Reference':
obj_ = ReferenceType.factory()
obj_.build(child_)
self.Reference.append(obj_)
obj_.original_tagname_ = 'Reference'
# end class SignedInfoType
class CanonicalizationMethodType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Algorithm=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
self.original_tagname_ = None
self.Algorithm = _cast(None, Algorithm)
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
self.valueOf_ = valueOf_
if mixedclass_ is None:
self.mixedclass_ = MixedContainer
else:
self.mixedclass_ = mixedclass_
if content_ is None:
self.content_ = []
else:
self.content_ = content_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, CanonicalizationMethodType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if CanonicalizationMethodType.subclass:
return CanonicalizationMethodType.subclass(*args_, **kwargs_)
else:
return CanonicalizationMethodType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_.insert(index, value)
def get_Algorithm(self): return self.Algorithm
def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.anytypeobjs_ or
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='CanonicalizationMethodType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('CanonicalizationMethodType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='CanonicalizationMethodType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='CanonicalizationMethodType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='CanonicalizationMethodType'):
if self.Algorithm is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
def exportChildren(self, outfile, level, namespace_='', name_='CanonicalizationMethodType', fromsubclass_=False, pretty_print=True):
if not fromsubclass_:
for item_ in self.content_:
item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
if node.text is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', node.text)
self.content_.append(obj_)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Algorithm', node)
if value is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
self.Algorithm = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == '':
            # xs:any wildcard child. The generator emitted the unresolved
            # placeholder ``__ANY__`` here, which would raise NameError when
            # reached; build the element through the gds_build_any hook
            # instead (as X509DataType does below) and keep the result as
            # mixed content.
            obj_ = self.gds_build_any(child_, 'CanonicalizationMethodType')
            if obj_ is not None:
                obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                    MixedContainer.TypeNone, '', obj_)
                self.content_.append(obj_)
if not fromsubclass_ and child_.tail is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', child_.tail)
self.content_.append(obj_)
# end class CanonicalizationMethodType
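# SignatureMethodType binds ds:SignatureMethod: an Algorithm attribute and
# an optional integer HMACOutputLength child, with any remaining wildcard
# children and text kept as mixed content in self.content_.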
class SignatureMethodType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Algorithm=None, HMACOutputLength=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
self.original_tagname_ = None
self.Algorithm = _cast(None, Algorithm)
self.HMACOutputLength = HMACOutputLength
self.validate_HMACOutputLengthType(self.HMACOutputLength)
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
self.valueOf_ = valueOf_
if mixedclass_ is None:
self.mixedclass_ = MixedContainer
else:
self.mixedclass_ = mixedclass_
if content_ is None:
self.content_ = []
else:
self.content_ = content_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, SignatureMethodType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if SignatureMethodType.subclass:
return SignatureMethodType.subclass(*args_, **kwargs_)
else:
return SignatureMethodType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_HMACOutputLength(self): return self.HMACOutputLength
def set_HMACOutputLength(self, HMACOutputLength): self.HMACOutputLength = HMACOutputLength
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_.insert(index, value)
def get_Algorithm(self): return self.Algorithm
def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def validate_HMACOutputLengthType(self, value):
# Validate type HMACOutputLengthType, a restriction on integer.
if value is not None and Validate_simpletypes_:
pass
def hasContent_(self):
if (
self.HMACOutputLength is not None or
self.anytypeobjs_ or
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='SignatureMethodType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureMethodType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureMethodType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='SignatureMethodType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignatureMethodType'):
if self.Algorithm is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
def exportChildren(self, outfile, level, namespace_='', name_='SignatureMethodType', fromsubclass_=False, pretty_print=True):
if not fromsubclass_:
for item_ in self.content_:
item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.HMACOutputLength is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<HMACOutputLength>%s</HMACOutputLength>%s' % (self.gds_format_integer(self.HMACOutputLength, input_name='HMACOutputLength'), eol_))
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
if node.text is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', node.text)
self.content_.append(obj_)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Algorithm', node)
if value is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
self.Algorithm = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'HMACOutputLength' and child_.text is not None:
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
obj_ = self.mixedclass_(MixedContainer.CategorySimple,
MixedContainer.TypeInteger, 'HMACOutputLength', ival_)
self.content_.append(obj_)
        elif nodeName_ == '':
            # xs:any wildcard child; replaces the generator's broken
            # ``__ANY__`` placeholder (see CanonicalizationMethodType).
            obj_ = self.gds_build_any(child_, 'SignatureMethodType')
            if obj_ is not None:
                obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                    MixedContainer.TypeNone, '', obj_)
                self.content_.append(obj_)
if not fromsubclass_ and child_.tail is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', child_.tail)
self.content_.append(obj_)
# end class SignatureMethodType
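# ReferenceType binds ds:Reference: Id/URI/Type attributes and the
# Transforms, DigestMethod and DigestValue children. DigestValue is
# base64-decoded while building and re-encoded via gds_format_base64 on
# export.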
class ReferenceType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, URI=None, Type=None, Transforms=None, DigestMethod=None, DigestValue=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.URI = _cast(None, URI)
self.Type = _cast(None, Type)
self.Transforms = Transforms
self.DigestMethod = DigestMethod
self.DigestValue = DigestValue
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, ReferenceType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if ReferenceType.subclass:
return ReferenceType.subclass(*args_, **kwargs_)
else:
return ReferenceType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Transforms(self): return self.Transforms
def set_Transforms(self, Transforms): self.Transforms = Transforms
def get_DigestMethod(self): return self.DigestMethod
def set_DigestMethod(self, DigestMethod): self.DigestMethod = DigestMethod
def get_DigestValue(self): return self.DigestValue
def set_DigestValue(self, DigestValue): self.DigestValue = DigestValue
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def get_URI(self): return self.URI
def set_URI(self, URI): self.URI = URI
def get_Type(self): return self.Type
def set_Type(self, Type): self.Type = Type
def hasContent_(self):
if (
self.Transforms is not None or
self.DigestMethod is not None or
self.DigestValue is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='ReferenceType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReferenceType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ReferenceType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='ReferenceType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ReferenceType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
if self.URI is not None and 'URI' not in already_processed:
already_processed.add('URI')
outfile.write(' URI=%s' % (quote_attrib(self.URI), ))
if self.Type is not None and 'Type' not in already_processed:
already_processed.add('Type')
outfile.write(' Type=%s' % (quote_attrib(self.Type), ))
def exportChildren(self, outfile, level, namespace_='', name_='ReferenceType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Transforms is not None:
self.Transforms.export(outfile, level, namespace_='ds:', name_='Transforms', pretty_print=pretty_print)
if self.DigestMethod is not None:
self.DigestMethod.export(outfile, level, namespace_='ds:', name_='DigestMethod', pretty_print=pretty_print)
if self.DigestValue is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ds:DigestValue>%s</ds:DigestValue>%s' % (self.gds_format_base64(self.DigestValue, input_name='DigestValue'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
value = find_attr_value_('URI', node)
if value is not None and 'URI' not in already_processed:
already_processed.add('URI')
self.URI = value
value = find_attr_value_('Type', node)
if value is not None and 'Type' not in already_processed:
already_processed.add('Type')
self.Type = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Transforms':
obj_ = TransformsType.factory()
obj_.build(child_)
self.Transforms = obj_
obj_.original_tagname_ = 'Transforms'
elif nodeName_ == 'DigestMethod':
obj_ = DigestMethodType.factory()
obj_.build(child_)
self.DigestMethod = obj_
obj_.original_tagname_ = 'DigestMethod'
elif nodeName_ == 'DigestValue':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'DigestValue')
else:
bval_ = None
self.DigestValue = bval_
# end class ReferenceType
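# TransformsType binds ds:Transforms, a simple sequence of ds:Transform
# children exposed through the usual generated list accessors
# (get_/set_/add_/insert_/replace_Transform...).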
class TransformsType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Transform=None):
self.original_tagname_ = None
if Transform is None:
self.Transform = []
else:
self.Transform = Transform
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, TransformsType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if TransformsType.subclass:
return TransformsType.subclass(*args_, **kwargs_)
else:
return TransformsType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Transform(self): return self.Transform
def set_Transform(self, Transform): self.Transform = Transform
def add_Transform(self, value): self.Transform.append(value)
def insert_Transform_at(self, index, value): self.Transform.insert(index, value)
def replace_Transform_at(self, index, value): self.Transform[index] = value
def hasContent_(self):
if (
self.Transform
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='TransformsType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransformsType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='TransformsType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='TransformsType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TransformsType'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='TransformsType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Transform_ in self.Transform:
Transform_.export(outfile, level, namespace_='ds:', name_='Transform', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Transform':
obj_ = TransformType.factory()
obj_.build(child_)
self.Transform.append(obj_)
obj_.original_tagname_ = 'Transform'
# end class TransformsType
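# TransformType binds ds:Transform: an Algorithm attribute, zero or more
# XPath string children, plus mixed text/wildcard content collected in
# self.content_.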
class TransformType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Algorithm=None, anytypeobjs_=None, XPath=None, valueOf_=None, mixedclass_=None, content_=None):
self.original_tagname_ = None
self.Algorithm = _cast(None, Algorithm)
self.anytypeobjs_ = anytypeobjs_
if XPath is None:
self.XPath = []
else:
self.XPath = XPath
self.valueOf_ = valueOf_
if mixedclass_ is None:
self.mixedclass_ = MixedContainer
else:
self.mixedclass_ = mixedclass_
if content_ is None:
self.content_ = []
else:
self.content_ = content_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, TransformType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if TransformType.subclass:
return TransformType.subclass(*args_, **kwargs_)
else:
return TransformType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def get_XPath(self): return self.XPath
def set_XPath(self, XPath): self.XPath = XPath
def add_XPath(self, value): self.XPath.append(value)
def insert_XPath_at(self, index, value): self.XPath.insert(index, value)
def replace_XPath_at(self, index, value): self.XPath[index] = value
def get_Algorithm(self): return self.Algorithm
def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.anytypeobjs_ is not None or
self.XPath or
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='TransformType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransformType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='TransformType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='TransformType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TransformType'):
if self.Algorithm is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
def exportChildren(self, outfile, level, namespace_='', name_='TransformType', fromsubclass_=False, pretty_print=True):
if not fromsubclass_:
for item_ in self.content_:
item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for XPath_ in self.XPath:
showIndent(outfile, level, pretty_print)
outfile.write('<XPath>%s</XPath>%s' % (self.gds_encode(self.gds_format_string(quote_xml(XPath_), input_name='XPath')), eol_))
if self.anytypeobjs_ is not None:
self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
if node.text is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', node.text)
self.content_.append(obj_)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Algorithm', node)
if value is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
self.Algorithm = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == '':
            # xs:any wildcard child; replaces the generator's broken
            # ``__ANY__`` placeholder (see CanonicalizationMethodType).
            obj_ = self.gds_build_any(child_, 'TransformType')
            if obj_ is not None:
                obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                    MixedContainer.TypeNone, '', obj_)
                self.content_.append(obj_)
elif nodeName_ == 'XPath' and child_.text is not None:
valuestr_ = child_.text
obj_ = self.mixedclass_(MixedContainer.CategorySimple,
MixedContainer.TypeString, 'XPath', valuestr_)
self.content_.append(obj_)
if not fromsubclass_ and child_.tail is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', child_.tail)
self.content_.append(obj_)
# end class TransformType
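# DigestMethodType binds ds:DigestMethod: an Algorithm attribute with
# mixed text/wildcard content, structurally the same shape as
# CanonicalizationMethodType above.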
class DigestMethodType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Algorithm=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
self.original_tagname_ = None
self.Algorithm = _cast(None, Algorithm)
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
self.valueOf_ = valueOf_
if mixedclass_ is None:
self.mixedclass_ = MixedContainer
else:
self.mixedclass_ = mixedclass_
if content_ is None:
self.content_ = []
else:
self.content_ = content_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, DigestMethodType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if DigestMethodType.subclass:
return DigestMethodType.subclass(*args_, **kwargs_)
else:
return DigestMethodType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_.insert(index, value)
def get_Algorithm(self): return self.Algorithm
def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.anytypeobjs_ or
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='DigestMethodType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('DigestMethodType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='DigestMethodType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='DigestMethodType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='DigestMethodType'):
if self.Algorithm is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
def exportChildren(self, outfile, level, namespace_='', name_='DigestMethodType', fromsubclass_=False, pretty_print=True):
if not fromsubclass_:
for item_ in self.content_:
item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
if node.text is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', node.text)
self.content_.append(obj_)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Algorithm', node)
if value is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
self.Algorithm = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == '':
            # xs:any wildcard child; replaces the generator's broken
            # ``__ANY__`` placeholder (see CanonicalizationMethodType).
            obj_ = self.gds_build_any(child_, 'DigestMethodType')
            if obj_ is not None:
                obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                    MixedContainer.TypeNone, '', obj_)
                self.content_.append(obj_)
if not fromsubclass_ and child_.tail is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', child_.tail)
self.content_.append(obj_)
# end class DigestMethodType
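# KeyInfoType binds ds:KeyInfo, a choice of KeyName, KeyValue,
# RetrievalMethod, X509Data, PGPData, SPKIData and MgmtData children (each
# repeatable), tracked both in the per-child lists and, in document order,
# in self.content_ as MixedContainer items.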
class KeyInfoType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, KeyName=None, KeyValue=None, RetrievalMethod=None, X509Data=None, PGPData=None, SPKIData=None, MgmtData=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
if KeyName is None:
self.KeyName = []
else:
self.KeyName = KeyName
if KeyValue is None:
self.KeyValue = []
else:
self.KeyValue = KeyValue
if RetrievalMethod is None:
self.RetrievalMethod = []
else:
self.RetrievalMethod = RetrievalMethod
if X509Data is None:
self.X509Data = []
else:
self.X509Data = X509Data
if PGPData is None:
self.PGPData = []
else:
self.PGPData = PGPData
if SPKIData is None:
self.SPKIData = []
else:
self.SPKIData = SPKIData
if MgmtData is None:
self.MgmtData = []
else:
self.MgmtData = MgmtData
self.anytypeobjs_ = anytypeobjs_
self.valueOf_ = valueOf_
if mixedclass_ is None:
self.mixedclass_ = MixedContainer
else:
self.mixedclass_ = mixedclass_
if content_ is None:
self.content_ = []
else:
self.content_ = content_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, KeyInfoType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if KeyInfoType.subclass:
return KeyInfoType.subclass(*args_, **kwargs_)
else:
return KeyInfoType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_KeyName(self): return self.KeyName
def set_KeyName(self, KeyName): self.KeyName = KeyName
def add_KeyName(self, value): self.KeyName.append(value)
def insert_KeyName_at(self, index, value): self.KeyName.insert(index, value)
def replace_KeyName_at(self, index, value): self.KeyName[index] = value
def get_KeyValue(self): return self.KeyValue
def set_KeyValue(self, KeyValue): self.KeyValue = KeyValue
def add_KeyValue(self, value): self.KeyValue.append(value)
def insert_KeyValue_at(self, index, value): self.KeyValue.insert(index, value)
def replace_KeyValue_at(self, index, value): self.KeyValue[index] = value
def get_RetrievalMethod(self): return self.RetrievalMethod
def set_RetrievalMethod(self, RetrievalMethod): self.RetrievalMethod = RetrievalMethod
def add_RetrievalMethod(self, value): self.RetrievalMethod.append(value)
def insert_RetrievalMethod_at(self, index, value): self.RetrievalMethod.insert(index, value)
def replace_RetrievalMethod_at(self, index, value): self.RetrievalMethod[index] = value
def get_X509Data(self): return self.X509Data
def set_X509Data(self, X509Data): self.X509Data = X509Data
def add_X509Data(self, value): self.X509Data.append(value)
def insert_X509Data_at(self, index, value): self.X509Data.insert(index, value)
def replace_X509Data_at(self, index, value): self.X509Data[index] = value
def get_PGPData(self): return self.PGPData
def set_PGPData(self, PGPData): self.PGPData = PGPData
def add_PGPData(self, value): self.PGPData.append(value)
def insert_PGPData_at(self, index, value): self.PGPData.insert(index, value)
def replace_PGPData_at(self, index, value): self.PGPData[index] = value
def get_SPKIData(self): return self.SPKIData
def set_SPKIData(self, SPKIData): self.SPKIData = SPKIData
def add_SPKIData(self, value): self.SPKIData.append(value)
def insert_SPKIData_at(self, index, value): self.SPKIData.insert(index, value)
def replace_SPKIData_at(self, index, value): self.SPKIData[index] = value
def get_MgmtData(self): return self.MgmtData
def set_MgmtData(self, MgmtData): self.MgmtData = MgmtData
def add_MgmtData(self, value): self.MgmtData.append(value)
def insert_MgmtData_at(self, index, value): self.MgmtData.insert(index, value)
def replace_MgmtData_at(self, index, value): self.MgmtData[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.KeyName or
self.KeyValue or
self.RetrievalMethod or
self.X509Data or
self.PGPData or
self.SPKIData or
self.MgmtData or
self.anytypeobjs_ is not None or
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='KeyInfoType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('KeyInfoType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='KeyInfoType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='KeyInfoType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='KeyInfoType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='KeyInfoType', fromsubclass_=False, pretty_print=True):
if not fromsubclass_:
for item_ in self.content_:
item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for KeyName_ in self.KeyName:
showIndent(outfile, level, pretty_print)
outfile.write('<ds:KeyName>%s</ds:KeyName>%s' % (self.gds_encode(self.gds_format_string(quote_xml(KeyName_), input_name='KeyName')), eol_))
for KeyValue_ in self.KeyValue:
KeyValue_.export(outfile, level, namespace_='ds:', name_='KeyValue', pretty_print=pretty_print)
for RetrievalMethod_ in self.RetrievalMethod:
RetrievalMethod_.export(outfile, level, namespace_='ds:', name_='RetrievalMethod', pretty_print=pretty_print)
for X509Data_ in self.X509Data:
X509Data_.export(outfile, level, namespace_='ds:', name_='X509Data', pretty_print=pretty_print)
for PGPData_ in self.PGPData:
PGPData_.export(outfile, level, namespace_='ds:', name_='PGPData', pretty_print=pretty_print)
for SPKIData_ in self.SPKIData:
SPKIData_.export(outfile, level, namespace_='ds:', name_='SPKIData', pretty_print=pretty_print)
for MgmtData_ in self.MgmtData:
showIndent(outfile, level, pretty_print)
outfile.write('<ds:MgmtData>%s</ds:MgmtData>%s' % (self.gds_encode(self.gds_format_string(quote_xml(MgmtData_), input_name='MgmtData')), eol_))
if self.anytypeobjs_ is not None:
self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
if node.text is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', node.text)
self.content_.append(obj_)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'KeyName' and child_.text is not None:
valuestr_ = child_.text
obj_ = self.mixedclass_(MixedContainer.CategorySimple,
MixedContainer.TypeString, 'KeyName', valuestr_)
self.content_.append(obj_)
elif nodeName_ == 'KeyValue':
obj_ = KeyValueType.factory()
obj_.build(child_)
obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
MixedContainer.TypeNone, 'KeyValue', obj_)
self.content_.append(obj_)
if hasattr(self, 'add_KeyValue'):
self.add_KeyValue(obj_.value)
elif hasattr(self, 'set_KeyValue'):
self.set_KeyValue(obj_.value)
elif nodeName_ == 'RetrievalMethod':
obj_ = RetrievalMethodType.factory()
obj_.build(child_)
obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
MixedContainer.TypeNone, 'RetrievalMethod', obj_)
self.content_.append(obj_)
if hasattr(self, 'add_RetrievalMethod'):
self.add_RetrievalMethod(obj_.value)
elif hasattr(self, 'set_RetrievalMethod'):
self.set_RetrievalMethod(obj_.value)
elif nodeName_ == 'X509Data':
obj_ = X509DataType.factory()
obj_.build(child_)
obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
MixedContainer.TypeNone, 'X509Data', obj_)
self.content_.append(obj_)
if hasattr(self, 'add_X509Data'):
self.add_X509Data(obj_.value)
elif hasattr(self, 'set_X509Data'):
self.set_X509Data(obj_.value)
elif nodeName_ == 'PGPData':
obj_ = PGPDataType.factory()
obj_.build(child_)
obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
MixedContainer.TypeNone, 'PGPData', obj_)
self.content_.append(obj_)
if hasattr(self, 'add_PGPData'):
self.add_PGPData(obj_.value)
elif hasattr(self, 'set_PGPData'):
self.set_PGPData(obj_.value)
elif nodeName_ == 'SPKIData':
obj_ = SPKIDataType.factory()
obj_.build(child_)
obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
MixedContainer.TypeNone, 'SPKIData', obj_)
self.content_.append(obj_)
if hasattr(self, 'add_SPKIData'):
self.add_SPKIData(obj_.value)
elif hasattr(self, 'set_SPKIData'):
self.set_SPKIData(obj_.value)
elif nodeName_ == 'MgmtData' and child_.text is not None:
valuestr_ = child_.text
obj_ = self.mixedclass_(MixedContainer.CategorySimple,
MixedContainer.TypeString, 'MgmtData', valuestr_)
self.content_.append(obj_)
        elif nodeName_ == '':
            # xs:any wildcard child; replaces the generator's broken
            # ``__ANY__`` placeholder (see CanonicalizationMethodType).
            obj_ = self.gds_build_any(child_, 'KeyInfoType')
            if obj_ is not None:
                obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                    MixedContainer.TypeNone, '', obj_)
                self.content_.append(obj_)
if not fromsubclass_ and child_.tail is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', child_.tail)
self.content_.append(obj_)
# end class KeyInfoType
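# A minimal round-trip sketch for these bindings (a usage illustration
# only, not part of the generated API; assumes lxml is available and that
# xml_bytes holds a serialized ds:KeyInfo element):
#
#     import sys
#     from lxml import etree
#     node = etree.fromstring(xml_bytes)      # parse the element tree
#     keyinfo = KeyInfoType.factory()         # honors subclass overrides
#     keyinfo.build(node)                     # populate from the node
#     keyinfo.export(sys.stdout, 0, namespace_='ds:', name_='KeyInfo')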
class KeyValueType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, DSAKeyValue=None, RSAKeyValue=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
self.original_tagname_ = None
self.DSAKeyValue = DSAKeyValue
self.RSAKeyValue = RSAKeyValue
self.anytypeobjs_ = anytypeobjs_
self.valueOf_ = valueOf_
if mixedclass_ is None:
self.mixedclass_ = MixedContainer
else:
self.mixedclass_ = mixedclass_
if content_ is None:
self.content_ = []
else:
self.content_ = content_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, KeyValueType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if KeyValueType.subclass:
return KeyValueType.subclass(*args_, **kwargs_)
else:
return KeyValueType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_DSAKeyValue(self): return self.DSAKeyValue
def set_DSAKeyValue(self, DSAKeyValue): self.DSAKeyValue = DSAKeyValue
def get_RSAKeyValue(self): return self.RSAKeyValue
def set_RSAKeyValue(self, RSAKeyValue): self.RSAKeyValue = RSAKeyValue
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.DSAKeyValue is not None or
self.RSAKeyValue is not None or
self.anytypeobjs_ is not None or
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='KeyValueType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('KeyValueType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='KeyValueType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='KeyValueType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='KeyValueType'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='KeyValueType', fromsubclass_=False, pretty_print=True):
if not fromsubclass_:
for item_ in self.content_:
item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.DSAKeyValue is not None:
self.DSAKeyValue.export(outfile, level, namespace_='ds:', name_='DSAKeyValue', pretty_print=pretty_print)
if self.RSAKeyValue is not None:
self.RSAKeyValue.export(outfile, level, namespace_='ds:', name_='RSAKeyValue', pretty_print=pretty_print)
if self.anytypeobjs_ is not None:
self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
if node.text is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', node.text)
self.content_.append(obj_)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'DSAKeyValue':
obj_ = DSAKeyValueType.factory()
obj_.build(child_)
obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
MixedContainer.TypeNone, 'DSAKeyValue', obj_)
self.content_.append(obj_)
if hasattr(self, 'add_DSAKeyValue'):
self.add_DSAKeyValue(obj_.value)
elif hasattr(self, 'set_DSAKeyValue'):
self.set_DSAKeyValue(obj_.value)
elif nodeName_ == 'RSAKeyValue':
obj_ = RSAKeyValueType.factory()
obj_.build(child_)
obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
MixedContainer.TypeNone, 'RSAKeyValue', obj_)
self.content_.append(obj_)
if hasattr(self, 'add_RSAKeyValue'):
self.add_RSAKeyValue(obj_.value)
elif hasattr(self, 'set_RSAKeyValue'):
self.set_RSAKeyValue(obj_.value)
        elif nodeName_ == '':
            # xs:any wildcard child; replaces the generator's broken
            # ``__ANY__`` placeholder (see CanonicalizationMethodType).
            obj_ = self.gds_build_any(child_, 'KeyValueType')
            if obj_ is not None:
                obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                    MixedContainer.TypeNone, '', obj_)
                self.content_.append(obj_)
if not fromsubclass_ and child_.tail is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', child_.tail)
self.content_.append(obj_)
# end class KeyValueType
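# RetrievalMethodType binds ds:RetrievalMethod: URI and Type attributes
# plus an optional ds:Transforms child; it carries no mixed content.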
class RetrievalMethodType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, URI=None, Type=None, Transforms=None):
self.original_tagname_ = None
self.URI = _cast(None, URI)
self.Type = _cast(None, Type)
self.Transforms = Transforms
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, RetrievalMethodType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if RetrievalMethodType.subclass:
return RetrievalMethodType.subclass(*args_, **kwargs_)
else:
return RetrievalMethodType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Transforms(self): return self.Transforms
def set_Transforms(self, Transforms): self.Transforms = Transforms
def get_URI(self): return self.URI
def set_URI(self, URI): self.URI = URI
def get_Type(self): return self.Type
def set_Type(self, Type): self.Type = Type
def hasContent_(self):
if (
self.Transforms is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='RetrievalMethodType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetrievalMethodType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='RetrievalMethodType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='RetrievalMethodType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='RetrievalMethodType'):
if self.URI is not None and 'URI' not in already_processed:
already_processed.add('URI')
outfile.write(' URI=%s' % (quote_attrib(self.URI), ))
if self.Type is not None and 'Type' not in already_processed:
already_processed.add('Type')
outfile.write(' Type=%s' % (quote_attrib(self.Type), ))
def exportChildren(self, outfile, level, namespace_='', name_='RetrievalMethodType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Transforms is not None:
self.Transforms.export(outfile, level, namespace_='ds:', name_='Transforms', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('URI', node)
if value is not None and 'URI' not in already_processed:
already_processed.add('URI')
self.URI = value
value = find_attr_value_('Type', node)
if value is not None and 'Type' not in already_processed:
already_processed.add('Type')
self.Type = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Transforms':
obj_ = TransformsType.factory()
obj_.build(child_)
self.Transforms = obj_
obj_.original_tagname_ = 'Transforms'
# end class RetrievalMethodType
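# X509DataType binds ds:X509Data: repeatable X509IssuerSerial, X509SKI,
# X509SubjectName, X509Certificate and X509CRL children; unrecognized
# children fall through to gds_build_any and, if built, are stored in
# anytypeobjs_.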
class X509DataType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, X509IssuerSerial=None, X509SKI=None, X509SubjectName=None, X509Certificate=None, X509CRL=None, anytypeobjs_=None):
self.original_tagname_ = None
if X509IssuerSerial is None:
self.X509IssuerSerial = []
else:
self.X509IssuerSerial = X509IssuerSerial
if X509SKI is None:
self.X509SKI = []
else:
self.X509SKI = X509SKI
if X509SubjectName is None:
self.X509SubjectName = []
else:
self.X509SubjectName = X509SubjectName
if X509Certificate is None:
self.X509Certificate = []
else:
self.X509Certificate = X509Certificate
if X509CRL is None:
self.X509CRL = []
else:
self.X509CRL = X509CRL
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, X509DataType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if X509DataType.subclass:
return X509DataType.subclass(*args_, **kwargs_)
else:
return X509DataType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_X509IssuerSerial(self): return self.X509IssuerSerial
def set_X509IssuerSerial(self, X509IssuerSerial): self.X509IssuerSerial = X509IssuerSerial
def add_X509IssuerSerial(self, value): self.X509IssuerSerial.append(value)
def insert_X509IssuerSerial_at(self, index, value): self.X509IssuerSerial.insert(index, value)
def replace_X509IssuerSerial_at(self, index, value): self.X509IssuerSerial[index] = value
def get_X509SKI(self): return self.X509SKI
def set_X509SKI(self, X509SKI): self.X509SKI = X509SKI
def add_X509SKI(self, value): self.X509SKI.append(value)
def insert_X509SKI_at(self, index, value): self.X509SKI.insert(index, value)
def replace_X509SKI_at(self, index, value): self.X509SKI[index] = value
def get_X509SubjectName(self): return self.X509SubjectName
def set_X509SubjectName(self, X509SubjectName): self.X509SubjectName = X509SubjectName
def add_X509SubjectName(self, value): self.X509SubjectName.append(value)
def insert_X509SubjectName_at(self, index, value): self.X509SubjectName.insert(index, value)
def replace_X509SubjectName_at(self, index, value): self.X509SubjectName[index] = value
def get_X509Certificate(self): return self.X509Certificate
def set_X509Certificate(self, X509Certificate): self.X509Certificate = X509Certificate
def add_X509Certificate(self, value): self.X509Certificate.append(value)
def insert_X509Certificate_at(self, index, value): self.X509Certificate.insert(index, value)
def replace_X509Certificate_at(self, index, value): self.X509Certificate[index] = value
def get_X509CRL(self): return self.X509CRL
def set_X509CRL(self, X509CRL): self.X509CRL = X509CRL
def add_X509CRL(self, value): self.X509CRL.append(value)
def insert_X509CRL_at(self, index, value): self.X509CRL.insert(index, value)
def replace_X509CRL_at(self, index, value): self.X509CRL[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def hasContent_(self):
if (
self.X509IssuerSerial or
self.X509SKI or
self.X509SubjectName or
self.X509Certificate or
self.X509CRL or
self.anytypeobjs_ is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='X509DataType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('X509DataType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='X509DataType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='X509DataType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='X509DataType'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='X509DataType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for X509IssuerSerial_ in self.X509IssuerSerial:
X509IssuerSerial_.export(outfile, level, namespace_, name_='X509IssuerSerial', pretty_print=pretty_print)
for X509SKI_ in self.X509SKI:
showIndent(outfile, level, pretty_print)
outfile.write('<X509SKI>%s</X509SKI>%s' % (self.gds_encode(self.gds_format_string(quote_xml(X509SKI_), input_name='X509SKI')), eol_))
for X509SubjectName_ in self.X509SubjectName:
showIndent(outfile, level, pretty_print)
outfile.write('<X509SubjectName>%s</X509SubjectName>%s' % (self.gds_encode(self.gds_format_string(quote_xml(X509SubjectName_), input_name='X509SubjectName')), eol_))
for X509Certificate_ in self.X509Certificate:
showIndent(outfile, level, pretty_print)
outfile.write('<X509Certificate>%s</X509Certificate>%s' % (self.gds_encode(self.gds_format_string(quote_xml(X509Certificate_), input_name='X509Certificate')), eol_))
for X509CRL_ in self.X509CRL:
showIndent(outfile, level, pretty_print)
outfile.write('<X509CRL>%s</X509CRL>%s' % (self.gds_encode(self.gds_format_string(quote_xml(X509CRL_), input_name='X509CRL')), eol_))
if self.anytypeobjs_ is not None:
self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'X509IssuerSerial':
obj_ = X509IssuerSerialType.factory()
obj_.build(child_)
self.X509IssuerSerial.append(obj_)
obj_.original_tagname_ = 'X509IssuerSerial'
elif nodeName_ == 'X509SKI':
X509SKI_ = child_.text
X509SKI_ = self.gds_validate_string(X509SKI_, node, 'X509SKI')
self.X509SKI.append(X509SKI_)
elif nodeName_ == 'X509SubjectName':
X509SubjectName_ = child_.text
X509SubjectName_ = self.gds_validate_string(X509SubjectName_, node, 'X509SubjectName')
self.X509SubjectName.append(X509SubjectName_)
elif nodeName_ == 'X509Certificate':
X509Certificate_ = child_.text
X509Certificate_ = self.gds_validate_string(X509Certificate_, node, 'X509Certificate')
self.X509Certificate.append(X509Certificate_)
elif nodeName_ == 'X509CRL':
X509CRL_ = child_.text
X509CRL_ = self.gds_validate_string(X509CRL_, node, 'X509CRL')
self.X509CRL.append(X509CRL_)
else:
obj_ = self.gds_build_any(child_, 'X509DataType')
if obj_ is not None:
self.set_anytypeobjs_(obj_)
# end class X509DataType
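# X509IssuerSerialType binds ds:X509IssuerSerial: the issuer's
# distinguished name (X509IssuerName) and the certificate serial number
# (X509SerialNumber), both handled as plain strings here.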
class X509IssuerSerialType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, X509IssuerName=None, X509SerialNumber=None):
self.original_tagname_ = None
self.X509IssuerName = X509IssuerName
self.X509SerialNumber = X509SerialNumber
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, X509IssuerSerialType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if X509IssuerSerialType.subclass:
return X509IssuerSerialType.subclass(*args_, **kwargs_)
else:
return X509IssuerSerialType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_X509IssuerName(self): return self.X509IssuerName
def set_X509IssuerName(self, X509IssuerName): self.X509IssuerName = X509IssuerName
def get_X509SerialNumber(self): return self.X509SerialNumber
def set_X509SerialNumber(self, X509SerialNumber): self.X509SerialNumber = X509SerialNumber
def hasContent_(self):
if (
self.X509IssuerName is not None or
self.X509SerialNumber is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='X509IssuerSerialType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('X509IssuerSerialType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='X509IssuerSerialType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='X509IssuerSerialType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='X509IssuerSerialType'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='X509IssuerSerialType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.X509IssuerName is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<X509IssuerName>%s</X509IssuerName>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.X509IssuerName), input_name='X509IssuerName')), eol_))
if self.X509SerialNumber is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<X509SerialNumber>%s</X509SerialNumber>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.X509SerialNumber), input_name='X509SerialNumber')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'X509IssuerName':
X509IssuerName_ = child_.text
X509IssuerName_ = self.gds_validate_string(X509IssuerName_, node, 'X509IssuerName')
self.X509IssuerName = X509IssuerName_
elif nodeName_ == 'X509SerialNumber':
X509SerialNumber_ = child_.text
X509SerialNumber_ = self.gds_validate_string(X509SerialNumber_, node, 'X509SerialNumber')
self.X509SerialNumber = X509SerialNumber_
# end class X509IssuerSerialType
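# PGPDataType binds ds:PGPData: optional PGPKeyID and PGPKeyPacket string
# children plus a list of wildcard children collected via gds_build_any.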
class PGPDataType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, PGPKeyID=None, PGPKeyPacket=None, anytypeobjs_=None):
self.original_tagname_ = None
self.PGPKeyID = PGPKeyID
self.PGPKeyPacket = PGPKeyPacket
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, PGPDataType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if PGPDataType.subclass:
return PGPDataType.subclass(*args_, **kwargs_)
else:
return PGPDataType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_PGPKeyID(self): return self.PGPKeyID
def set_PGPKeyID(self, PGPKeyID): self.PGPKeyID = PGPKeyID
def get_PGPKeyPacket(self): return self.PGPKeyPacket
def set_PGPKeyPacket(self, PGPKeyPacket): self.PGPKeyPacket = PGPKeyPacket
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_.insert(index, value)
def hasContent_(self):
if (
self.PGPKeyID is not None or
self.PGPKeyPacket is not None or
self.anytypeobjs_
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='PGPDataType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('PGPDataType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='PGPDataType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='PGPDataType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='PGPDataType'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='PGPDataType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.PGPKeyID is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<PGPKeyID>%s</PGPKeyID>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.PGPKeyID), input_name='PGPKeyID')), eol_))
if self.PGPKeyPacket is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<PGPKeyPacket>%s</PGPKeyPacket>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.PGPKeyPacket), input_name='PGPKeyPacket')), eol_))
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'PGPKeyID':
PGPKeyID_ = child_.text
PGPKeyID_ = self.gds_validate_string(PGPKeyID_, node, 'PGPKeyID')
self.PGPKeyID = PGPKeyID_
elif nodeName_ == 'PGPKeyPacket':
PGPKeyPacket_ = child_.text
PGPKeyPacket_ = self.gds_validate_string(PGPKeyPacket_, node, 'PGPKeyPacket')
self.PGPKeyPacket = PGPKeyPacket_
else:
obj_ = self.gds_build_any(child_, 'PGPDataType')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
# end class PGPDataType
class SPKIDataType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, SPKISexp=None, anytypeobjs_=None):
self.original_tagname_ = None
if SPKISexp is None:
self.SPKISexp = []
else:
self.SPKISexp = SPKISexp
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, SPKIDataType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if SPKIDataType.subclass:
return SPKIDataType.subclass(*args_, **kwargs_)
else:
return SPKIDataType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_SPKISexp(self): return self.SPKISexp
def set_SPKISexp(self, SPKISexp): self.SPKISexp = SPKISexp
def add_SPKISexp(self, value): self.SPKISexp.append(value)
def insert_SPKISexp_at(self, index, value): self.SPKISexp.insert(index, value)
def replace_SPKISexp_at(self, index, value): self.SPKISexp[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def hasContent_(self):
if (
self.SPKISexp or
self.anytypeobjs_ is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='SPKIDataType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('SPKIDataType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SPKIDataType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='SPKIDataType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SPKIDataType'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='SPKIDataType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for SPKISexp_ in self.SPKISexp:
showIndent(outfile, level, pretty_print)
outfile.write('<SPKISexp>%s</SPKISexp>%s' % (self.gds_encode(self.gds_format_string(quote_xml(SPKISexp_), input_name='SPKISexp')), eol_))
if self.anytypeobjs_ is not None:
self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'SPKISexp':
SPKISexp_ = child_.text
SPKISexp_ = self.gds_validate_string(SPKISexp_, node, 'SPKISexp')
self.SPKISexp.append(SPKISexp_)
else:
obj_ = self.gds_build_any(child_, 'SPKIDataType')
if obj_ is not None:
self.set_anytypeobjs_(obj_)
# end class SPKIDataType
class ObjectType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, MimeType=None, Encoding=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.MimeType = _cast(None, MimeType)
self.Encoding = _cast(None, Encoding)
self.anytypeobjs_ = anytypeobjs_
self.valueOf_ = valueOf_
if mixedclass_ is None:
self.mixedclass_ = MixedContainer
else:
self.mixedclass_ = mixedclass_
if content_ is None:
self.content_ = []
else:
self.content_ = content_
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, ObjectType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if ObjectType.subclass:
return ObjectType.subclass(*args_, **kwargs_)
else:
return ObjectType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def get_MimeType(self): return self.MimeType
def set_MimeType(self, MimeType): self.MimeType = MimeType
def get_Encoding(self): return self.Encoding
def set_Encoding(self, Encoding): self.Encoding = Encoding
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.anytypeobjs_ is not None or
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='ObjectType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('ObjectType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ObjectType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='ObjectType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ObjectType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
if self.MimeType is not None and 'MimeType' not in already_processed:
already_processed.add('MimeType')
outfile.write(' MimeType=%s' % (quote_attrib(self.MimeType), ))
if self.Encoding is not None and 'Encoding' not in already_processed:
already_processed.add('Encoding')
outfile.write(' Encoding=%s' % (quote_attrib(self.Encoding), ))
def exportChildren(self, outfile, level, namespace_='', name_='ObjectType', fromsubclass_=False, pretty_print=True):
if not fromsubclass_:
for item_ in self.content_:
item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.anytypeobjs_ is not None:
self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
if node.text is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', node.text)
self.content_.append(obj_)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
value = find_attr_value_('MimeType', node)
if value is not None and 'MimeType' not in already_processed:
already_processed.add('MimeType')
self.MimeType = value
value = find_attr_value_('Encoding', node)
if value is not None and 'Encoding' not in already_processed:
already_processed.add('Encoding')
self.Encoding = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == '':
            # The generator emitted an undefined __ANY__ placeholder for
            # xs:any wildcard children; build them with the generic
            # any-type helper (as the sibling classes do) so this branch
            # cannot raise NameError, and drop the dead hasattr fallbacks.
            obj_ = self.gds_build_any(child_, 'ObjectType')
            if obj_ is not None:
                obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                    MixedContainer.TypeNone, '', obj_)
                self.content_.append(obj_)
if not fromsubclass_ and child_.tail is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', child_.tail)
self.content_.append(obj_)
# end class ObjectType
class ManifestType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, Reference=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
if Reference is None:
self.Reference = []
else:
self.Reference = Reference
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, ManifestType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if ManifestType.subclass:
return ManifestType.subclass(*args_, **kwargs_)
else:
return ManifestType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Reference(self): return self.Reference
def set_Reference(self, Reference): self.Reference = Reference
def add_Reference(self, value): self.Reference.append(value)
def insert_Reference_at(self, index, value): self.Reference.insert(index, value)
def replace_Reference_at(self, index, value): self.Reference[index] = value
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def hasContent_(self):
if (
self.Reference
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='ManifestType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('ManifestType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ManifestType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='ManifestType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ManifestType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='ManifestType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Reference_ in self.Reference:
Reference_.export(outfile, level, namespace_='ds:', name_='Reference', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Reference':
obj_ = ReferenceType.factory()
obj_.build(child_)
self.Reference.append(obj_)
obj_.original_tagname_ = 'Reference'
# end class ManifestType
class SignaturePropertiesType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, SignatureProperty=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
if SignatureProperty is None:
self.SignatureProperty = []
else:
self.SignatureProperty = SignatureProperty
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, SignaturePropertiesType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if SignaturePropertiesType.subclass:
return SignaturePropertiesType.subclass(*args_, **kwargs_)
else:
return SignaturePropertiesType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_SignatureProperty(self): return self.SignatureProperty
def set_SignatureProperty(self, SignatureProperty): self.SignatureProperty = SignatureProperty
def add_SignatureProperty(self, value): self.SignatureProperty.append(value)
def insert_SignatureProperty_at(self, index, value): self.SignatureProperty.insert(index, value)
def replace_SignatureProperty_at(self, index, value): self.SignatureProperty[index] = value
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def hasContent_(self):
if (
self.SignatureProperty
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='SignaturePropertiesType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignaturePropertiesType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignaturePropertiesType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='SignaturePropertiesType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignaturePropertiesType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='SignaturePropertiesType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for SignatureProperty_ in self.SignatureProperty:
SignatureProperty_.export(outfile, level, namespace_='ds:', name_='SignatureProperty', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'SignatureProperty':
obj_ = SignaturePropertyType.factory()
obj_.build(child_)
self.SignatureProperty.append(obj_)
obj_.original_tagname_ = 'SignatureProperty'
# end class SignaturePropertiesType
class SignaturePropertyType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Target=None, Id=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
self.original_tagname_ = None
self.Target = _cast(None, Target)
self.Id = _cast(None, Id)
self.anytypeobjs_ = anytypeobjs_
self.valueOf_ = valueOf_
if mixedclass_ is None:
self.mixedclass_ = MixedContainer
else:
self.mixedclass_ = mixedclass_
if content_ is None:
self.content_ = []
else:
self.content_ = content_
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, SignaturePropertyType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if SignaturePropertyType.subclass:
return SignaturePropertyType.subclass(*args_, **kwargs_)
else:
return SignaturePropertyType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def get_Target(self): return self.Target
def set_Target(self, Target): self.Target = Target
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.anytypeobjs_ is not None or
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='SignaturePropertyType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignaturePropertyType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignaturePropertyType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='SignaturePropertyType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignaturePropertyType'):
if self.Target is not None and 'Target' not in already_processed:
already_processed.add('Target')
outfile.write(' Target=%s' % (quote_attrib(self.Target), ))
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='SignaturePropertyType', fromsubclass_=False, pretty_print=True):
if not fromsubclass_:
for item_ in self.content_:
item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.anytypeobjs_ is not None:
self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
if node.text is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', node.text)
self.content_.append(obj_)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Target', node)
if value is not None and 'Target' not in already_processed:
already_processed.add('Target')
self.Target = value
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == '':
            # Same fix as in ObjectType: replace the undefined __ANY__
            # placeholder with the generic any-type builder.
            obj_ = self.gds_build_any(child_, 'SignaturePropertyType')
            if obj_ is not None:
                obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                    MixedContainer.TypeNone, '', obj_)
                self.content_.append(obj_)
if not fromsubclass_ and child_.tail is not None:
obj_ = self.mixedclass_(MixedContainer.CategoryText,
MixedContainer.TypeNone, '', child_.tail)
self.content_.append(obj_)
# end class SignaturePropertyType
class DSAKeyValueType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, P=None, Q=None, G=None, Y=None, J=None, Seed=None, PgenCounter=None):
self.original_tagname_ = None
self.P = P
self.validate_CryptoBinary(self.P)
self.Q = Q
self.validate_CryptoBinary(self.Q)
self.G = G
self.validate_CryptoBinary(self.G)
self.Y = Y
self.validate_CryptoBinary(self.Y)
self.J = J
self.validate_CryptoBinary(self.J)
self.Seed = Seed
self.validate_CryptoBinary(self.Seed)
self.PgenCounter = PgenCounter
self.validate_CryptoBinary(self.PgenCounter)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, DSAKeyValueType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if DSAKeyValueType.subclass:
return DSAKeyValueType.subclass(*args_, **kwargs_)
else:
return DSAKeyValueType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_P(self): return self.P
def set_P(self, P): self.P = P
def get_Q(self): return self.Q
def set_Q(self, Q): self.Q = Q
def get_G(self): return self.G
def set_G(self, G): self.G = G
def get_Y(self): return self.Y
def set_Y(self, Y): self.Y = Y
def get_J(self): return self.J
def set_J(self, J): self.J = J
def get_Seed(self): return self.Seed
def set_Seed(self, Seed): self.Seed = Seed
def get_PgenCounter(self): return self.PgenCounter
def set_PgenCounter(self, PgenCounter): self.PgenCounter = PgenCounter
def validate_CryptoBinary(self, value):
# Validate type CryptoBinary, a restriction on base64Binary.
if value is not None and Validate_simpletypes_:
pass
def hasContent_(self):
if (
self.P is not None or
self.Q is not None or
self.G is not None or
self.Y is not None or
self.J is not None or
self.Seed is not None or
self.PgenCounter is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='DSAKeyValueType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('DSAKeyValueType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='DSAKeyValueType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='DSAKeyValueType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='DSAKeyValueType'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='DSAKeyValueType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.P is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<P>%s</P>%s' % (self.gds_format_base64(self.P, input_name='P'), eol_))
if self.Q is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Q>%s</Q>%s' % (self.gds_format_base64(self.Q, input_name='Q'), eol_))
if self.G is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<G>%s</G>%s' % (self.gds_format_base64(self.G, input_name='G'), eol_))
if self.Y is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Y>%s</Y>%s' % (self.gds_format_base64(self.Y, input_name='Y'), eol_))
if self.J is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<J>%s</J>%s' % (self.gds_format_base64(self.J, input_name='J'), eol_))
if self.Seed is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Seed>%s</Seed>%s' % (self.gds_format_base64(self.Seed, input_name='Seed'), eol_))
if self.PgenCounter is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<PgenCounter>%s</PgenCounter>%s' % (self.gds_format_base64(self.PgenCounter, input_name='PgenCounter'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'P':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'P')
else:
bval_ = None
self.P = bval_
# validate type CryptoBinary
self.validate_CryptoBinary(self.P)
elif nodeName_ == 'Q':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'Q')
else:
bval_ = None
self.Q = bval_
# validate type CryptoBinary
self.validate_CryptoBinary(self.Q)
elif nodeName_ == 'G':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'G')
else:
bval_ = None
self.G = bval_
# validate type CryptoBinary
self.validate_CryptoBinary(self.G)
elif nodeName_ == 'Y':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'Y')
else:
bval_ = None
self.Y = bval_
# validate type CryptoBinary
self.validate_CryptoBinary(self.Y)
elif nodeName_ == 'J':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'J')
else:
bval_ = None
self.J = bval_
# validate type CryptoBinary
self.validate_CryptoBinary(self.J)
elif nodeName_ == 'Seed':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'Seed')
else:
bval_ = None
self.Seed = bval_
# validate type CryptoBinary
self.validate_CryptoBinary(self.Seed)
elif nodeName_ == 'PgenCounter':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'PgenCounter')
else:
bval_ = None
self.PgenCounter = bval_
# validate type CryptoBinary
self.validate_CryptoBinary(self.PgenCounter)
# end class DSAKeyValueType
class RSAKeyValueType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Modulus=None, Exponent=None):
self.original_tagname_ = None
self.Modulus = Modulus
self.validate_CryptoBinary(self.Modulus)
self.Exponent = Exponent
self.validate_CryptoBinary(self.Exponent)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, RSAKeyValueType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if RSAKeyValueType.subclass:
return RSAKeyValueType.subclass(*args_, **kwargs_)
else:
return RSAKeyValueType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Modulus(self): return self.Modulus
def set_Modulus(self, Modulus): self.Modulus = Modulus
def get_Exponent(self): return self.Exponent
def set_Exponent(self, Exponent): self.Exponent = Exponent
def validate_CryptoBinary(self, value):
# Validate type CryptoBinary, a restriction on base64Binary.
if value is not None and Validate_simpletypes_:
pass
def hasContent_(self):
if (
self.Modulus is not None or
self.Exponent is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='RSAKeyValueType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('RSAKeyValueType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='RSAKeyValueType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='RSAKeyValueType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='RSAKeyValueType'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='RSAKeyValueType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Modulus is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Modulus>%s</Modulus>%s' % (self.gds_format_base64(self.Modulus, input_name='Modulus'), eol_))
if self.Exponent is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Exponent>%s</Exponent>%s' % (self.gds_format_base64(self.Exponent, input_name='Exponent'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Modulus':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'Modulus')
else:
bval_ = None
self.Modulus = bval_
# validate type CryptoBinary
self.validate_CryptoBinary(self.Modulus)
elif nodeName_ == 'Exponent':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'Exponent')
else:
bval_ = None
self.Exponent = bval_
# validate type CryptoBinary
self.validate_CryptoBinary(self.Exponent)
# end class RSAKeyValueType
class PeriodoEmissaoType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, DataInicial=None, DataFinal=None):
self.original_tagname_ = None
if isinstance(DataInicial, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataInicial, '%Y-%m-%d').date()
else:
initvalue_ = DataInicial
self.DataInicial = initvalue_
if isinstance(DataFinal, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataFinal, '%Y-%m-%d').date()
else:
initvalue_ = DataFinal
self.DataFinal = initvalue_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, PeriodoEmissaoType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if PeriodoEmissaoType.subclass:
return PeriodoEmissaoType.subclass(*args_, **kwargs_)
else:
return PeriodoEmissaoType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_DataInicial(self): return self.DataInicial
def set_DataInicial(self, DataInicial): self.DataInicial = DataInicial
def get_DataFinal(self): return self.DataFinal
def set_DataFinal(self, DataFinal): self.DataFinal = DataFinal
def hasContent_(self):
if (
self.DataInicial is not None or
self.DataFinal is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='PeriodoEmissaoType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('PeriodoEmissaoType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='PeriodoEmissaoType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='PeriodoEmissaoType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='PeriodoEmissaoType'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='PeriodoEmissaoType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.DataInicial is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<DataInicial>%s</DataInicial>%s' % (self.gds_format_date(self.DataInicial, input_name='DataInicial'), eol_))
if self.DataFinal is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<DataFinal>%s</DataFinal>%s' % (self.gds_format_date(self.DataFinal, input_name='DataFinal'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'DataInicial':
sval_ = child_.text
dval_ = self.gds_parse_date(sval_)
self.DataInicial = dval_
elif nodeName_ == 'DataFinal':
sval_ = child_.text
dval_ = self.gds_parse_date(sval_)
self.DataFinal = dval_
# end class PeriodoEmissaoType
class ListaRpsType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Rps=None):
self.original_tagname_ = None
if Rps is None:
self.Rps = []
else:
self.Rps = Rps
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, ListaRpsType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if ListaRpsType.subclass:
return ListaRpsType.subclass(*args_, **kwargs_)
else:
return ListaRpsType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Rps(self): return self.Rps
def set_Rps(self, Rps): self.Rps = Rps
def add_Rps(self, value): self.Rps.append(value)
def insert_Rps_at(self, index, value): self.Rps.insert(index, value)
def replace_Rps_at(self, index, value): self.Rps[index] = value
def hasContent_(self):
if (
self.Rps
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='ListaRpsType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListaRpsType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ListaRpsType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='ListaRpsType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ListaRpsType'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='ListaRpsType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Rps_ in self.Rps:
Rps_.export(outfile, level, namespace_, name_='Rps', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Rps':
obj_ = tcRps.factory()
obj_.build(child_)
self.Rps.append(obj_)
obj_.original_tagname_ = 'Rps'
# end class ListaRpsType
GDSClassesMapping = {
}
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
print(USAGE_TEXT)
sys.exit(1)
def get_root_tag(node):
tag = Tag_pattern_.match(node.tag).groups()[-1]
rootClass = GDSClassesMapping.get(tag)
if rootClass is None:
rootClass = globals().get(tag)
return tag, rootClass
def parse(inFileName, silence=False):
parser = None
doc = parsexml_(inFileName, parser)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'ConsultarNfseEnvio'
rootClass = ConsultarNfseEnvio
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
if not silence:
sys.stdout.write('<?xml version="1.0" ?>\n')
rootObj.export(
sys.stdout, 0, name_=rootTag,
namespacedef_='',
pretty_print=True)
return rootObj
def parseEtree(inFileName, silence=False):
parser = None
doc = parsexml_(inFileName, parser)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'ConsultarNfseEnvio'
rootClass = ConsultarNfseEnvio
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
mapping = {}
rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
if not silence:
content = etree_.tostring(
rootElement, pretty_print=True,
xml_declaration=True, encoding="utf-8")
sys.stdout.write(content)
sys.stdout.write('\n')
return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False):
if sys.version_info.major == 2:
from StringIO import StringIO as IOBuffer
else:
from io import BytesIO as IOBuffer
parser = None
doc = parsexml_(IOBuffer(inString), parser)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'ConsultarNfseEnvio'
rootClass = ConsultarNfseEnvio
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
if not silence:
sys.stdout.write('<?xml version="1.0" ?>\n')
rootObj.export(
sys.stdout, 0, name_=rootTag,
namespacedef_='')
return rootObj
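# Usage note (assumption): because the Python 3 branch above wraps inString
# in BytesIO, callers must pass bytes rather than str, e.g.
#   rootObj = parseString(xml_text.encode('utf-8'), silence=True)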
def parseLiteral(inFileName, silence=False):
parser = None
doc = parsexml_(inFileName, parser)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'ConsultarNfseEnvio'
rootClass = ConsultarNfseEnvio
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
if not silence:
sys.stdout.write('#from servico_consultar_nfse_envio_v03 import *\n\n')
sys.stdout.write('import servico_consultar_nfse_envio_v03 as model_\n\n')
sys.stdout.write('rootObj = model_.rootClass(\n')
rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
sys.stdout.write(')\n')
return rootObj
def main():
args = sys.argv[1:]
if len(args) == 1:
parse(args[0])
else:
usage()
if __name__ == '__main__':
#import pdb; pdb.set_trace()
main()
__all__ = [
"CanonicalizationMethodType",
"ConsultarNfseEnvio",
"DSAKeyValueType",
"DigestMethodType",
"KeyInfoType",
"KeyValueType",
"ListaMensagemRetorno",
"ListaRpsType",
"ManifestType",
"ObjectType",
"PGPDataType",
"PeriodoEmissaoType",
"RSAKeyValueType",
"ReferenceType",
"RetrievalMethodType",
"SPKIDataType",
"SignatureMethodType",
"SignaturePropertiesType",
"SignaturePropertyType",
"SignatureType",
"SignatureValueType",
"SignedInfoType",
"TransformType",
"TransformsType",
"X509DataType",
"X509IssuerSerialType",
"tcCancelamentoNfse",
"tcCompNfse",
"tcConfirmacaoCancelamento",
"tcContato",
"tcCpfCnpj",
"tcDadosConstrucaoCivil",
"tcDadosPrestador",
"tcDadosServico",
"tcDadosTomador",
"tcEndereco",
"tcIdentificacaoIntermediarioServico",
"tcIdentificacaoNfse",
"tcIdentificacaoOrgaoGerador",
"tcIdentificacaoPrestador",
"tcIdentificacaoRps",
"tcIdentificacaoTomador",
"tcInfConfirmacaoCancelamento",
"tcInfNfse",
"tcInfPedidoCancelamento",
"tcInfRps",
"tcInfSubstituicaoNfse",
"tcLoteRps",
"tcMensagemRetorno",
"tcMensagemRetornoLote",
"tcNfse",
"tcPedidoCancelamento",
"tcRps",
"tcSubstituicaoNfse",
"tcValores"
]
| [
"[email protected]"
] | |
a59222150d7b44cad7f9073542b3a3d9527d9baa | e6ec89f4e40b6ef7183ef76bf542f683154dea03 | /django_tutorial/settings.py | 5970db695f93dc3410ab4bc3fb20d3ca7ab6c0e1 | [] | no_license | shoark7/django-2.0-tutorial | a07919048dd22f5fe295713f6e46003ab8b4a57a | ccdae9ebc3d383145b0aa19227ff986b8d06cf93 | refs/heads/master | 2021-04-26T23:57:53.816519 | 2018-03-06T07:21:36 | 2018-03-06T07:21:36 | 123,886,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,153 | py | """
Django settings for django_tutorial project.
Generated by 'django-admin startproject' using Django 2.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'v&)bevqcv7(8xre6%qy*%a4imbut_5@ndwfeegkqhr3gu)a4$f'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'polls.apps.PollsConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'django_tutorial.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_tutorial.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'ko-kr'
TIME_ZONE = 'Asia/Seoul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
a24a8cb7958e6472091d3698f75ebcb279a536e7 | f115984d89ee91e1fefa7bd0546f60db251dfee6 | /model-cell-experiments/predict-mc.py | 6504f4c534af5e600136a6c43be59dcfafaca1e8 | [
"BSD-3-Clause"
] | permissive | CardiacModelling/VoltageClampModel | f483fc3ad2129f75e377df210b9b91b1cdcb7565 | f30271da75e3c70526e53fb51dc12b317ab3b714 | refs/heads/master | 2023-07-05T10:07:59.771334 | 2021-03-03T11:05:35 | 2021-03-03T11:05:35 | 227,666,074 | 3 | 0 | BSD-3-Clause | 2021-03-03T11:04:46 | 2019-12-12T17:58:13 | Python | UTF-8 | Python | false | false | 4,079 | py | #!/usr/bin/env python3
from __future__ import print_function
import sys
sys.path.append('../lib/')
import os
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import pints
import model as m; m.vhold = 0
"""
Prediction for single model cell experiment data
"""
predict_list = ['staircase', 'sinewave', 'ap-beattie', 'ap-lei']
data_idx = {'staircase': 1, 'sinewave': 0, 'ap-beattie': 2, 'ap-lei': 3}
protocol_list = {
'staircase': 'staircase-ramp.csv',
'sinewave': 'sinewave-ramp.csv',
'ap-beattie': 'ap-beattie.csv',
'ap-lei': 'ap-lei.csv'}
legend_ncol = {
'staircase': (2, 1),
'sinewave': (1, 1),
'ap-beattie': (4, 2),
'ap-lei': (4, 2)}
try:
which_predict = sys.argv[1]
except:
print('Usage: python %s [str:which_predict]' % os.path.basename(__file__))
sys.exit()
if which_predict not in predict_list:
raise ValueError('Input data %s is not available in the predict list' \
% which_predict)
savedir = './figs'
if not os.path.isdir(savedir):
os.makedirs(savedir)
# Load data
path2data = '../../model-cell-dataset/'
sys.path.append(path2data)
import util
idx = [0, data_idx[which_predict], 0]
f = 'data/20191002_mc_nocomp.dat'
whole_data, times = util.load(f, idx, vccc=True)
if which_predict == 'staircase':
to_plot = np.where(times < 15.2)[0]
for i in range(len(whole_data)):
whole_data[i] = whole_data[i][to_plot]
times = times[to_plot]
times = times * 1e3 # s -> ms
data_cc = whole_data[2] * 1e3 # V -> mV
data_vc = whole_data[1] * 1e3 # V -> mV
data = (whole_data[0] + whole_data[3]) * 1e12 # A -> pA
#out = np.array([times * 1e-3, data_vc]).T
#np.savetxt('recorded-voltage.csv', out, delimiter=',', comments='',
# header='\"time\",\"voltage\"')
saveas = 'mcnocomp'
# Model
model = m.Model('../mmt-model-files/full2-voltage-clamp-mc.mmt',
protocol_def=protocol_list[which_predict],
temperature=273.15 + 23.0, # K
transform=None,
readout='voltageclamp.Iout',
useFilterCap=False)
parameters = [
'mc.g',
'voltageclamp.cprs',
'membrane.cm',
'voltageclamp.rseries',
'voltageclamp.voffset_eff',
]
model.set_parameters(parameters)
parameter_to_fix = [
'voltageclamp.cprs_est',
'voltageclamp.cm_est',
'voltageclamp.rseries_est',
]
parameter_to_fix_values = [
0., # pF; Cprs*
0.0, # pF; Cm*
0, # GOhm; Rs*
]
fix_p = {}
for i, j in zip(parameter_to_fix, parameter_to_fix_values):
fix_p[i] = j
model.set_fix_parameters(fix_p)
# Load parameters
loaddir = './out'
loadas = 'mcnocomp'
fit_seed = 542811797
p = np.loadtxt('%s/%s-solution-%s-1.txt' % (loaddir, loadas, fit_seed))
current_label = 'Fit' if which_predict == 'staircase' else 'Prediction'
# Simulate
extra_log = ['voltageclamp.Vc', 'membrane.V']
simulation = model.simulate(p, times, extra_log=extra_log)
Iout = simulation['voltageclamp.Iout']
Vc = simulation['voltageclamp.Vc']
Vm = simulation['membrane.V']
# Plot
fig, axes = plt.subplots(2, 1, sharex=True, figsize=(14, 4))
axes[0].plot(times, data_vc, c='#a6bddb', label=r'Measured $V_{cmd}$')
axes[0].plot(times, data_cc, c='#feb24c', label=r'Measured $V_{m}$')
axes[0].plot(times, Vc, ls='--', c='#045a8d', label=r'Input $V_{cmd}$')
axes[0].plot(times, Vm, ls='--', c='#bd0026', label=r'Predicted $V_{m}$')
axes[0].set_ylabel('Voltage (mV)', fontsize=14)
#axes[0].set_xticks([])
axes[0].legend(ncol=legend_ncol[which_predict][0])
axes[1].plot(times, data, alpha=0.5, label='Measurement')
axes[1].plot(times, Iout, ls='--', label=current_label)
axes[1].set_ylim([-800, 1200]) # TODO?
axes[1].legend(ncol=legend_ncol[which_predict][1])
axes[1].set_ylabel('Current (pA)', fontsize=14)
axes[1].set_xlabel('Time (ms)', fontsize=14)
plt.subplots_adjust(hspace=0)
plt.savefig('%s/predict-%s-%s.pdf' % (savedir, saveas, which_predict),
format='pdf', bbox_inches='tight')
plt.savefig('%s/predict-%s-%s' % (savedir, saveas, which_predict), dpi=300,
bbox_inches='tight')
plt.close()
| [
"[email protected]"
] | |
0623f1cd7027bc649a34f739367d490341d6121d | 999f3f3da1cb70cb5872f99a09d65d7c4df71cf7 | /src/data/1155.py | 3ea4cafb57081a9c8ff3f3a81e384b01c09e630f | [
"MIT"
] | permissive | NULLCT/LOMC | 0f0d1f01cce1d5633e239d411565ac7f0c687955 | 79a16474a8f21310e0fb47e536d527dd5dc6d655 | refs/heads/main | 2023-07-27T17:03:46.703022 | 2021-09-04T08:58:45 | 2021-09-04T08:58:45 | 396,290,713 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,621 | py | def divisors(M):
d = []
i = 1
while M >= i**2:
if M % i == 0:
d.append(i)
if i**2 != M:
d.append(M // i)
i = i + 1
return d
def popcount(x):
x = x - ((x >> 1) & 0x55555555)
x = (x & 0x33333333) + ((x >> 2) & 0x33333333)
x = (x + (x >> 4)) & 0x0f0f0f0f
x = x + (x >> 8)
x = x + (x >> 16)
return x & 0x0000007f
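# Quick self-check of the 32-bit SWAR popcount above (illustrative):
assert popcount(13) == 3  # 0b1101 has three set bits
assert popcount(255) == 8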
def eratosthenes(n):
res = [0 for i in range(n + 1)]
prime = set([])
for i in range(2, n + 1):
if not res[i]:
prime.add(i)
for j in range(1, n // i + 1):
res[i * j] = 1
return prime
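# Example: the sieve returns the set of primes up to n.
assert eratosthenes(10) == {2, 3, 5, 7}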
#Assumes a global set `prime` (e.g. from eratosthenes) covering all primes
#up to sqrt(n); any remaining factor larger than that is appended as-is.
def factorization(n):
res = []
for p in prime:
if n % p == 0:
while n % p == 0:
n //= p
res.append(p)
if n != 1:
res.append(n)
return res
def euler_phi(n):
res = n
for x in range(2, n + 1):
if x**2 > n:
break
if n % x == 0:
res = res // x * (x - 1)
while n % x == 0:
n //= x
if n != 1:
res = res // n * (n - 1)
return res
def ind(b, n):
res = 0
while n % b == 0:
res += 1
n //= b
return res
def isPrimeMR(n):
d = n - 1
d = d // (d & -d)
L = [2, 3, 5, 7, 11, 13, 17]
for a in L:
t = d
y = pow(a, t, n)
if y == 1: continue
while y != n - 1:
y = (y * y) % n
if y == 1 or t == n - 1: return 0
t <<= 1
return 1
def findFactorRho(n):
from math import gcd
m = 1 << n.bit_length() // 8
for c in range(1, 99):
f = lambda x: (x * x + c) % n
y, r, q, g = 2, 1, 1, 1
while g == 1:
x = y
for i in range(r):
y = f(y)
k = 0
while k < r and g == 1:
ys = y
for i in range(min(m, r - k)):
y = f(y)
q = q * abs(x - y) % n
g = gcd(q, n)
k += m
r <<= 1
if g == n:
g = 1
while g == 1:
ys = f(ys)
g = gcd(abs(x - ys), n)
if g < n:
if isPrimeMR(g): return g
elif isPrimeMR(n // g): return n // g
return findFactorRho(g)
def primeFactor(n):
i = 2
ret = {}
rhoFlg = 0
while i * i <= n:
k = 0
while n % i == 0:
n //= i
k += 1
if k: ret[i] = k
i += 1 + i % 2
if i == 101 and n >= 2**20:
while n > 1:
if isPrimeMR(n):
ret[n], n = 1, 1
else:
rhoFlg = 1
j = findFactorRho(n)
k = 0
while n % j == 0:
n //= j
k += 1
ret[j] = k
if n > 1: ret[n] = 1
if rhoFlg: ret = {x: ret[x] for x in sorted(ret)}
return ret
#Note: this redefines (shadows) the trial-division divisors() above,
#using primeFactor so it also handles large n.
def divisors(n):
res = [1]
prime = primeFactor(n)
for p in prime:
newres = []
for d in res:
for j in range(prime[p] + 1):
newres.append(d * p**j)
res = newres
res.sort()
return res
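# Example: divisors are returned in increasing order.
assert divisors(12) == [1, 2, 3, 4, 6, 12]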
def xorfactorial(num):
    #Cumulative XOR of 1..num; the original called the undefined helpers
    #baseorder/function, replaced here by bit_length and direct recursion.
    if num == 0:
        return 0
    elif num == 1:
        return 1
    elif num == 2:
        return 3
    elif num == 3:
        return 0
    else:
        x = num.bit_length() - 1  # position of the highest set bit
        return (2**x) * ((num - 2**x + 1) % 2) + xorfactorial(num - 2**x)
#XOR (dyadic) convolution of length-2**n sequences via the Walsh-Hadamard
#transform; assumes globals mod (modulus) and inv (inverse of 2 mod mod)
#are defined by the caller.
def xorconv(n, X, Y):
if n == 0:
res = [(X[0] * Y[0]) % mod]
return res
x = [X[i] + X[i + 2**(n - 1)] for i in range(2**(n - 1))]
y = [Y[i] + Y[i + 2**(n - 1)] for i in range(2**(n - 1))]
z = [X[i] - X[i + 2**(n - 1)] for i in range(2**(n - 1))]
w = [Y[i] - Y[i + 2**(n - 1)] for i in range(2**(n - 1))]
res1 = xorconv(n - 1, x, y)
res2 = xorconv(n - 1, z, w)
former = [(res1[i] + res2[i]) * inv for i in range(2**(n - 1))]
latter = [(res1[i] - res2[i]) * inv for i in range(2**(n - 1))]
former = list(map(lambda x: x % mod, former))
latter = list(map(lambda x: x % mod, latter))
return former + latter
#Merge two already-sorted lists A and B into one sorted list,
#in O(len(A) + len(B)).
def merge_sort(A, B):
pos_A, pos_B = 0, 0
n, m = len(A), len(B)
res = []
while pos_A < n and pos_B < m:
a, b = A[pos_A], B[pos_B]
if a < b:
res.append(a)
pos_A += 1
else:
res.append(b)
pos_B += 1
res += A[pos_A:]
res += B[pos_B:]
return res
class UnionFindVerSize():
def __init__(self, N):
self._parent = [n for n in range(0, N)]
self._size = [1] * N
self.group = N
    def find_root(self, x):
        # Iterative find with full path compression; the recursive call the
        # original made first was redundant and could hit the recursion limit.
        stack = [x]
        while self._parent[stack[-1]] != stack[-1]:
            stack.append(self._parent[stack[-1]])
        root = stack[-1]
        for v in stack:
            self._parent[v] = root
        return root
def unite(self, x, y):
gx = self.find_root(x)
gy = self.find_root(y)
if gx == gy: return
self.group -= 1
if self._size[gx] < self._size[gy]:
self._parent[gx] = gy
self._size[gy] += self._size[gx]
else:
self._parent[gy] = gx
self._size[gx] += self._size[gy]
def get_size(self, x):
return self._size[self.find_root(x)]
def is_same_group(self, x, y):
return self.find_root(x) == self.find_root(y)
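# Illustrative use of UnionFindVerSize (quick self-check):
_uf = UnionFindVerSize(5)
_uf.unite(0, 1)
_uf.unite(1, 2)
assert _uf.is_same_group(0, 2) and _uf.get_size(0) == 3 and _uf.group == 3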
class WeightedUnionFind():
def __init__(self, N):
self.parent = [i for i in range(N)]
self.size = [1 for i in range(N)]
self.val = [0 for i in range(N)]
self.flag = True
self.edge = [[] for i in range(N)]
def dfs(self, v, pv):
stack = [(v, pv)]
new_parent = self.parent[pv]
while stack:
v, pv = stack.pop()
self.parent[v] = new_parent
for nv, w in self.edge[v]:
if nv != pv:
self.val[nv] = self.val[v] + w
stack.append((nv, v))
def unite(self, x, y, w):
if not self.flag:
return
if self.parent[x] == self.parent[y]:
self.flag = (self.val[x] - self.val[y] == w)
return
if self.size[self.parent[x]] > self.size[self.parent[y]]:
self.edge[x].append((y, -w))
self.edge[y].append((x, w))
self.size[x] += self.size[y]
self.val[y] = self.val[x] - w
self.dfs(y, x)
else:
self.edge[x].append((y, -w))
self.edge[y].append((x, w))
self.size[y] += self.size[x]
self.val[x] = self.val[y] + w
self.dfs(x, y)
class Dijkstra():
class Edge():
def __init__(self, _to, _cost):
self.to = _to
self.cost = _cost
def __init__(self, V):
self.G = [[] for i in range(V)]
self._E = 0
self._V = V
@property
def E(self):
return self._E
@property
def V(self):
return self._V
def add_edge(self, _from, _to, _cost):
self.G[_from].append(self.Edge(_to, _cost))
self._E += 1
def shortest_path(self, s):
import heapq
que = []
d = [10**15] * self.V
d[s] = 0
heapq.heappush(que, (0, s))
while len(que) != 0:
cost, v = heapq.heappop(que)
if d[v] < cost: continue
for i in range(len(self.G[v])):
e = self.G[v][i]
if d[e.to] > d[v] + e.cost:
d[e.to] = d[v] + e.cost
heapq.heappush(que, (d[e.to], e.to))
return d
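# Illustrative use of Dijkstra (edges are directed; 10**15 marks unreachable):
_g = Dijkstra(3)
_g.add_edge(0, 1, 5)
_g.add_edge(1, 2, 2)
_g.add_edge(0, 2, 9)
assert _g.shortest_path(0) == [0, 5, 7]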
#Z[i]:length of the longest list starting from S[i] which is also a prefix of S
#O(|S|)
def Z_algorithm(s):
N = len(s)
Z_alg = [0] * N
Z_alg[0] = N
i = 1
j = 0
while i < N:
while i + j < N and s[j] == s[i + j]:
j += 1
Z_alg[i] = j
if j == 0:
i += 1
continue
k = 1
while i + k < N and k + Z_alg[k] < j:
Z_alg[i + k] = Z_alg[k]
k += 1
i += k
j -= k
return Z_alg
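# e.g. Z_algorithm("aabxaab") -> [7, 1, 0, 0, 3, 1, 0]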
class BIT():
def __init__(self, n, mod=0):
self.BIT = [0] * (n + 1)
self.num = n
self.mod = mod
def query(self, idx):
res_sum = 0
mod = self.mod
while idx > 0:
res_sum += self.BIT[idx]
if mod:
res_sum %= mod
idx -= idx & (-idx)
return res_sum
#Ai += x O(logN)
def update(self, idx, x):
mod = self.mod
while idx <= self.num:
self.BIT[idx] += x
if mod:
self.BIT[idx] %= mod
idx += idx & (-idx)
return
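# Usage sketch (1-indexed Fenwick tree):
#   bit = BIT(5)
#   bit.update(1, 3); bit.update(4, 2)
#   bit.query(4) -> 5   (prefix sum A[1] + ... + A[4])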
class dancinglink():
def __init__(self, n, debug=False):
self.n = n
self.debug = debug
self._left = [i - 1 for i in range(n)]
self._right = [i + 1 for i in range(n)]
self.exist = [True for i in range(n)]
def pop(self, k):
if self.debug:
assert self.exist[k]
L = self._left[k]
R = self._right[k]
if L != -1:
if R != self.n:
self._right[L], self._left[R] = R, L
else:
self._right[L] = self.n
elif R != self.n:
self._left[R] = -1
self.exist[k] = False
def left(self, idx, k=1):
if self.debug:
assert self.exist[idx]
res = idx
while k:
res = self._left[res]
if res == -1:
break
k -= 1
return res
def right(self, idx, k=1):
if self.debug:
assert self.exist[idx]
res = idx
while k:
res = self._right[res]
if res == self.n:
break
k -= 1
return res
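# Usage sketch:
#   dl = dancinglink(5)
#   dl.pop(2)
#   dl.right(1) -> 3 and dl.left(3) -> 1, since 2 is now unlinked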
class SparseTable():
def __init__(self, A, merge_func, ide_ele):
N = len(A)
n = N.bit_length()
self.table = [[ide_ele for i in range(n)] for i in range(N)]
self.merge_func = merge_func
for i in range(N):
self.table[i][0] = A[i]
for j in range(1, n):
for i in range(0, N - 2**j + 1):
f = self.table[i][j - 1]
s = self.table[i + 2**(j - 1)][j - 1]
self.table[i][j] = self.merge_func(f, s)
def query(self, s, t):
b = t - s + 1
m = b.bit_length() - 1
return self.merge_func(self.table[s][m], self.table[t - 2**m + 1][m])
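# Usage sketch (query takes a closed interval [s, t]; merge_func must be
# idempotent, e.g. min, max or gcd):
#   st = SparseTable([3, 1, 4, 1, 5], min, float("inf"))
#   st.query(1, 3) -> 1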
class BinaryTrie:
class node:
def __init__(self, val):
self.left = None
self.right = None
self.max = val
def __init__(self):
self.root = self.node(-10**15)
def append(self, key, val):
pos = self.root
for i in range(29, -1, -1):
pos.max = max(pos.max, val)
if key >> i & 1:
if pos.right is None:
pos.right = self.node(val)
pos = pos.right
else:
pos = pos.right
else:
if pos.left is None:
pos.left = self.node(val)
pos = pos.left
else:
pos = pos.left
pos.max = max(pos.max, val)
def search(self, M, xor):
res = -10**15
pos = self.root
for i in range(29, -1, -1):
if pos is None:
break
if M >> i & 1:
if xor >> i & 1:
if pos.right:
res = max(res, pos.right.max)
pos = pos.left
else:
if pos.left:
res = max(res, pos.left.max)
pos = pos.right
else:
if xor >> i & 1:
pos = pos.right
else:
pos = pos.left
if pos:
res = max(res, pos.max)
return res
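# Usage sketch: keys are treated as 30-bit integers and search(M, xor)
# returns the largest stored val over keys k with (k ^ xor) <= M, or -10**15:
#   bt = BinaryTrie()
#   bt.append(5, 100); bt.append(9, 200)
#   bt.search(6, 3) -> 100   (5 ^ 3 = 6 <= 6, while 9 ^ 3 = 10 > 6)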
def solveequation(edge, ans, n, m):
    #edge=[[to,dire,id]...]
    x = [0] * m
    used = [False] * n
    # dfs is nested so it can close over x, used, edge and ans
    def dfs(v):
        used[v] = True
        r = ans[v]
        for to, dire, id in edge[v]:
            if used[to]:
                continue
            y = dfs(to)
            if dire == -1:
                x[id] = y
            else:
                x[id] = -y
            r += y
        return r
    for v in range(n):
        if used[v]:
            continue
        y = dfs(v)
        if y != 0:
            return False
    return x
class SegmentTree:
def __init__(self, init_val, segfunc, ide_ele):
n = len(init_val)
self.segfunc = segfunc
self.ide_ele = ide_ele
self.num = 1 << (n - 1).bit_length()
self.tree = [ide_ele] * 2 * self.num
self.size = n
for i in range(n):
self.tree[self.num + i] = init_val[i]
for i in range(self.num - 1, 0, -1):
self.tree[i] = self.segfunc(self.tree[2 * i], self.tree[2 * i + 1])
def update(self, k, x):
k += self.num
self.tree[k] = x
while k > 1:
self.tree[k >> 1] = self.segfunc(self.tree[k], self.tree[k ^ 1])
k >>= 1
def query(self, l, r):
if r == self.size:
r = self.num
res = self.ide_ele
l += self.num
r += self.num
while l < r:
if l & 1:
res = self.segfunc(res, self.tree[l])
l += 1
if r & 1:
res = self.segfunc(res, self.tree[r - 1])
l >>= 1
r >>= 1
return res
def bisect_l(self, l, r, x):
l += self.num
r += self.num
Lmin = -1
Rmin = -1
while l < r:
if l & 1:
if self.tree[l] <= x and Lmin == -1:
Lmin = l
l += 1
if r & 1:
if self.tree[r - 1] <= x:
Rmin = r - 1
l >>= 1
r >>= 1
if Lmin != -1:
pos = Lmin
while pos < self.num:
if self.tree[2 * pos] <= x:
pos = 2 * pos
else:
pos = 2 * pos + 1
return pos - self.num
elif Rmin != -1:
pos = Rmin
while pos < self.num:
if self.tree[2 * pos] <= x:
pos = 2 * pos
else:
pos = 2 * pos + 1
return pos - self.num
else:
return -1
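# Usage sketch (query is half-open [l, r); bisect_l assumes a min-like
# segfunc and returns the leftmost index in [l, r) whose value is <= x):
#   seg = SegmentTree([5, 3, 7, 1], min, float("inf"))
#   seg.query(0, 4) -> 1
#   seg.update(3, 10); seg.query(2, 4) -> 7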
import sys, random, bisect
from collections import deque, defaultdict
from heapq import heapify, heappop, heappush
from itertools import permutations
from math import gcd, log
input = lambda: sys.stdin.readline().rstrip()
mi = lambda: map(int, input().split())
li = lambda: list(mi())
N, Q = mi()
edge = [[] for i in range(N)]
for _ in range(N - 1):
a, b = mi()
edge[a - 1].append(b - 1)
edge[b - 1].append(a - 1)
parent = [-1 for v in range(N)]
deq = deque([0])
depth = [0 for v in range(N)]
while deq:
v = deq.popleft()
for nv in edge[v]:
if nv != parent[v]:
depth[nv] = depth[v] + 1
parent[nv] = v
deq.append(nv)
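# dist(c, d) = depth[c] + depth[d] - 2 * depth[lca(c, d)], so its parity
# equals the parity of depth[c] + depth[d] and no LCA query is needed.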
for _ in range(Q):
c, d = mi()
c, d = c - 1, d - 1
print("Road" if (depth[c] + depth[d]) & 1 else "Town")
| [
"[email protected]"
] | |
9cf854e5f10787c00d66b46032ef3d8ea4d91943 | 8f021f68cd0949afa8d119582c0b419b014919d8 | /URIOJ/uri2373.py | c142fff957d0f2e938944a791575fb1a31c7cdf5 | [] | no_license | Jonatankk/codigos | b9c8426c2f33b5142460a84337480b147169b3e6 | 233ae668bdf6cdd12dbc9ef243fb4ccdab49c933 | refs/heads/master | 2022-07-22T11:09:27.271029 | 2020-05-09T20:57:42 | 2020-05-09T20:57:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 365 | py | # -*- coding: utf-8 -*-
# Leonardo Deliyannis Constantin
# URI 2373 - Garçom
def main():
N = int(input())
ans = 0
for _ in range(N):
L, C = map(int, input().split())
if L > C:
ans += C
print(ans)
if __name__ == '__main__':
while True:
try:
main()
except EOFError:
break
| [
"[email protected]"
] | |
13f374290b54460f585cc996dd27042b763b7bc7 | 46a5df524f1d96baf94f6eb0f6222f2b856235f3 | /src/puzzle/problems/cryptogram_problem.py | c45547433c2d6fad7a436df5cd29d4b51b951e07 | [
"MIT"
] | permissive | PhilHarnish/forge | 5dfbb0aa2afdb91e55d85187bd86fbeb9b6b2888 | c544fb8b499e1e13793c94159f4c35bce187311e | refs/heads/master | 2023-03-11T17:23:46.569359 | 2023-02-25T15:09:01 | 2023-02-25T15:09:01 | 1,818,598 | 2 | 0 | MIT | 2023-02-25T15:09:02 | 2011-05-29T19:36:53 | Jupyter Notebook | UTF-8 | Python | false | false | 5,120 | py | from data import warehouse
from data.seek_sets import crypto_seek_set
from puzzle.problems import problem
# Humans will often choose a ROT value which is ~180 degrees away from A=A.
# For example: ROT13 is common and ROT1 or ROT25 are very uncommon.
_ROT_OFFSETS = list(sorted(range(1, 25), key=lambda i: abs(26 / 2 - i)))
_ALPHABET = 'abcdefghijklmnopqrstuvwxyz'
_ALPHABET_UPPER = _ALPHABET.upper()
_ROT_TRANSLATIONS = [None] + [
str.maketrans(_ALPHABET, _ALPHABET[i:] + _ALPHABET[:i]) for i in range(1, 26)
]
# At least 1/5th of the words must convert.
_MIN_CONVERSION = 0.2
# Minimum threshold for an "interesting" translation.
_MIN_WORD_THRESHOLD = 45000
# Minimum number of characters to consider "translated".
_MIN_WORD = 3
# If Trie yields results greater than this per character it is "good".
_TARGET_WORD_SCORE_RATE = 200000000
class CryptogramProblem(problem.Problem):
def __init__(self, name, lines, **kwargs):
super(CryptogramProblem, self).__init__(name, lines, **kwargs)
_, self._words = _parse(lines)
@staticmethod
def score(lines):
# Look at all of the "words" in all lines.
tokens, words = _parse(lines)
if not words:
return 0 # Nothing to cryptogram.
if len(words) < len(tokens) // 2:
return 0 # Fewer than half of the tokens could be considered words.
# How many words appear to be gibberish?
known_words = warehouse.get('/words/unigram')
are_words = sum(word in known_words for word in words)
if are_words < len(words) // 8 + 1:
# Fewer than 1 in 8 of the original words are known.
return 1
# Something with 5+ of words *might* be a cryptic clue.
return max(0.0, 0.25 * (min(5, len(words)) / 5))
def _solve_iter(self):
# First attempt a rotN solve.
all_text = '\n'.join(self.lines)
good_match = False
for solution, weight in _generate_rot_n(all_text, self._words):
good_match = good_match or weight == 1
yield solution, weight
if good_match:
return
for solution in _generate_partitioned_cryptograms(all_text, self._words):
yield solution
def _solve(self):
raise NotImplementedError()
def _parse(lines):
tokens = ' '.join(lines).lower().split()
return tokens, list(filter(str.isalpha, tokens))
def _generate_rot_n(all_text, words):
for offset in _ROT_OFFSETS:
score = rot_n_score(words, offset)
if score > _MIN_CONVERSION:
solution = all_text.translate(_ROT_TRANSLATIONS[offset])
yield '%s (rot%s)' % (solution, offset), score
def rot_n_score(words, n):
""" Score `words` for rotation `n`.
:param words:
:param n:
:return: Returns 1 if every single word translates to a common word.
If all words are common score decreases proportional to chars translated.
If all translations are uncommon then
"""
unigrams = warehouse.get('/words/unigram')
score = 0
all = 0
for word in words:
l = len(word)
if l < _MIN_WORD:
continue
translated = word.translate(_ROT_TRANSLATIONS[n])
if translated in unigrams:
word_weight = min(1, unigrams[translated] / _MIN_WORD_THRESHOLD)
score += l * word_weight
all += l
return score / all
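# Example (sketch): with n = 13, "uryyb" and "jbeyq" decode to "hello" and
# "world", so rot_n_score(["uryyb", "jbeyq"], 13) approaches 1.0 whenever
# the unigram table marks both decoded words as common.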
def _generate_partitioned_cryptograms(all_text, words):
# Focus on the longest words.
sorted_words = sorted(set(words), key=lambda x: -len(x))
trie = warehouse.get('/words/unigram/trie')
# Note: This score currently includes whitespace etc.
target_score = len(all_text) * _TARGET_WORD_SCORE_RATE
for trans, score in _partitioned_cryptograms_from(sorted_words, [], trie):
yield all_text.translate(trans), min(1, score / target_score)
def _partitioned_cryptograms_from(crypto_words, normal_words, trie):
pos = len(normal_words)
end = len(crypto_words) - 1
translation = _make_translation(crypto_words, normal_words)
seek_set = crypto_seek_set.CryptoSeekSet(
crypto_words[pos], translation=translation)
for word, score in trie.walk(seek_set, exact_match=True):
normal_words.append(word)
if pos == end:
yield _make_solution_translation_table(translation, crypto_words[pos],
normal_words[pos]), score
else:
for solution, child_score in _partitioned_cryptograms_from(
crypto_words, normal_words, trie):
# Up the trampoline, accumulating score.
yield solution, score + child_score
normal_words.pop()
def _make_translation(crypto_words, normal_words):
translation = {}
for crypto_word, normal_word in zip(crypto_words, normal_words):
for crypto_c, normal_c in zip(crypto_word, normal_word):
if crypto_c in translation and translation[crypto_c] != normal_c:
raise IndexError('Inconsistent translation %s -> %s' % (
crypto_words, normal_words))
translation[crypto_c] = normal_c
return translation
def _make_solution_translation_table(translation, last_crypto, last_word):
table = str.maketrans(translation)
table.update(str.maketrans(last_crypto, last_word))
# Determine upper case letters too.
table.update(
str.maketrans(_ALPHABET_UPPER, _ALPHABET.translate(table).upper()))
return table
| [
"[email protected]"
] | |
57e952da8acde84cb4f927b0adec5f8de45dfaef | 74aea619a499c6cba424a3f790f36315e647c55e | /Dynamo/src/RH Polyline to DS Polycurve.py | 44f0bdff25cfc21bf7f28ae66f98043f0c7ac3c9 | [] | no_license | mostaphaRoudsari/MantisShrimp | ca7c38af196116877efbab397adc17446616a9c8 | b8c70b1403103d60f85699608161476d628afede | refs/heads/master | 2021-01-15T09:41:44.314306 | 2015-01-13T23:56:00 | 2015-01-13T23:56:00 | 24,969,552 | 2 | 0 | null | 2015-01-13T23:56:00 | 2014-10-09T02:06:39 | Python | UTF-8 | Python | false | false | 2,334 | py | #Copyright(c) 2014, Konrad Sobon
# @arch_laboratory, http://archi-lab.net
import clr
import sys
clr.AddReference('ProtoGeometry')
pyt_path = r'C:\Program Files (x86)\IronPython 2.7\Lib'
sys.path.append(pyt_path)
import os
appDataPath = os.getenv('APPDATA')
msPath = appDataPath + r"\Dynamo\0.7\packages\Mantis Shrimp\extra"
if msPath not in sys.path:
sys.path.Add(msPath)
possibleRhPaths = []
possibleRhPaths.append(r"C:\Program Files\Rhinoceros 5 (64-bit)\System\RhinoCommon.dll")
possibleRhPaths.append(r"C:\Program Files\Rhinoceros 5.0 (64-bit)\System\RhinoCommon.dll")
possibleRhPaths.append(r"C:\Program Files\McNeel\Rhinoceros 5.0\System\RhinoCommon.dll")
possibleRhPaths.append(msPath)
checkPaths = map(lambda x: os.path.exists(x), possibleRhPaths)
for i, j in zip(possibleRhPaths, checkPaths):
if j and i not in sys.path:
sys.path.Add(i)
clr.AddReferenceToFileAndPath(i)
from Autodesk.DesignScript.Geometry import *
import Rhino as rc
#The inputs to this node will be stored as a list in the IN variable.
dataEnteringNode = IN
rhObjects = IN[0]
_units = IN[1]
#unit conversion function from Rhino to DS
def toDSUnits(_units):
if _units == rc.UnitSystem.Millimeters:
return 0.001
elif _units == rc.UnitSystem.Centimeters:
return 0.01
elif _units == rc.UnitSystem.Decimeters:
return 0.1
elif _units == rc.UnitSystem.Meters:
return 1
elif _units == rc.UnitSystem.Inches:
return 0.0254
elif _units == rc.UnitSystem.Feet:
return 0.3048
elif _units == rc.UnitSystem.Yards:
return 0.9144
#3dPoint Conversion function
def rhPoint3dToPoint(rhPoint):
rhPointX = rhPoint.X * toDSUnits(_units)
rhPointY = rhPoint.Y * toDSUnits(_units)
rhPointZ = rhPoint.Z * toDSUnits(_units)
return Point.ByCoordinates(rhPointX, rhPointY, rhPointZ)
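# Example (sketch): with _units == rc.UnitSystem.Millimeters, a Rhino point
# at (1000, 0, 0) becomes the DesignScript point (1.0, 0.0, 0.0).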
#poly curve conversion function
def rhCurveToPolyCurve(rhCurve):
ptArray = []
pCount = rhCurve.PointCount
for i in range(0, pCount):
dsPoint = rhPoint3dToPoint(rhCurve.Point(i))
ptArray.append(dsPoint)
dsPolyCurve = PolyCurve.ByPoints(ptArray)
del ptArray[:]
return dsPolyCurve
#convert rhino/gh geometry to ds geometry
dsPolyCurves = []
for i in rhObjects:
try:
i = i.Geometry
except:
pass
if i.ToString() == "Rhino.Geometry.PolylineCurve":
dsPolyCurves.append(rhCurveToPolyCurve(i))
#Assign your output to the OUT variable
OUT = dsPolyCurves
| [
"[email protected]"
] | |
919ca6f258d0ad24aa2cd1da271099356e257b9d | e72c9e619629f1b29066bd05f76232895fb3586e | /srcGetskl/ore_reverbJar.py | 6ef2d57a123dbf070429020207d2c7f74366c809 | [] | no_license | sarveshsparab/FrED | b671e356b71b143396c0bc7e98544eb5b3c065a4 | 87dd75a576e8270085d182cf78baaa7ccab84357 | refs/heads/master | 2020-04-25T20:34:20.152512 | 2018-11-12T13:24:39 | 2018-11-12T13:24:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,066 | py | import sys
import os
import time
sys.path.append("/home/yxqin/Scripts")
from strOperation import * # normRep normMen
####################################
#get relation skeletons from relation file(extracted by reverb.jar)
def getRelskl_fromRel(filename):
print "Processing " + filename
relFile = file(filename)
outputDir = os.path.split(filename)[0]
tStr = filename[-2:]
outputFile = file(outputDir + r"/relSkl_2013-01-" + tStr, "w")
lineIdx = 0
previousTid = tStr + "0"
previousText = ""
while 1:
lineStr = relFile.readline()
        if len(lineStr) <= 0:
            print str(lineIdx) + " lines are processed. End of file. " + str(time.asctime())
            if len(previousText) > 1:
                # flush the skeletons accumulated for the final tweet
                outputFile.write(previousTid + "\t" + previousText + "\n")
            break
lineIdx += 1
arr = lineStr.split("\t")
relArr = []
#print arr
tid = tStr+arr[1]
arg1 = getArg(arr[-3])
rel = "_".join(arr[-2].split(" "))
arg2 = getArg(arr[-1][:-1])
conf = float(arr[11])
relArr.append(tid)
relArr.append(normRep(arg1))
relArr.append(normRep(rel))
relArr.append(normRep(arg2))
relArr.append(conf)
print relArr
text = "_".join(relArr[1:-1])
if tid != previousTid:
if len(previousText) > 1:
outputFile.write(previousTid + "\t" + previousText + "\n")
#print "## " + previousTid + " " + previousText
previousTid = tid
previousText = text
else:
previousText += (" "+text)
if lineIdx % 100000 == 0:
print "# tweets processed: " + str(lineIdx) + " at " + str(time.asctime())
outputFile.close()
relFile.close()
# normMen to be processed
def getArg(item):
if len(item) > 0:
return "_".join(normMen(item.split(" ")))
else:
return item
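# Hypothetical example, assuming normMen from strOperation returns the
# mention tokens essentially unchanged: getArg("new york") -> "new_york"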
if __name__ == "__main__":
if len(sys.argv) < 2:
print "Usage ore_reverbJar.py inputFileName"
else:
filename = sys.argv[1]
# extract skl from Relation file(reverb.jar)
getRelskl_fromRel(filename)
| [
"[email protected]"
] | |
e8026ecd42c8c44fa6417c976f50f828cc83b40e | 5ca042838f15137130817b9e1766d8496a73d5db | /venv/bin/django-admin.py | f07913ab7e1689b81f250bf67a6114e95ec4e7ca | [
"MIT"
] | permissive | Emmanuel-9/Neighbourhood | 63257368e357adba3280f63a8f9d8ef77bcdfb23 | f2635cbc00181da97bdf17dee283eb905db2ec55 | refs/heads/master | 2022-12-08T06:18:02.137084 | 2020-08-21T11:54:17 | 2020-08-21T11:54:17 | 288,374,723 | 0 | 1 | null | 2020-08-21T07:56:22 | 2020-08-18T06:32:40 | HTML | UTF-8 | Python | false | false | 705 | py | #!/home/joan_e/code/Moringa/core/django/Neighbourhood/venv/bin/python3
# When the django-admin.py deprecation ends, remove this script.
import warnings
from django.core import management
try:
from django.utils.deprecation import RemovedInDjango40Warning
except ImportError:
raise ImportError(
'django-admin.py was deprecated in Django 3.1 and removed in Django '
'4.0. Please manually remove this script from your virtual environment '
'and use django-admin instead.'
)
if __name__ == "__main__":
warnings.warn(
'django-admin.py is deprecated in favor of django-admin.',
RemovedInDjango40Warning,
)
management.execute_from_command_line()
| [
"[email protected]"
] | |
90697d5b1e4c3cb7af501969295bfdaf846bf33f | 708074835900ae623239ce3c0d1e6f948b799fd0 | /ftp-1/ftp_server/bin/start.py | b5fa79aadef7fe47c9fffb6743a4175d84aeb3a0 | [] | no_license | hukeyy/learn_python | 66688bcbaa43d79775030d2876979bbda08892ef | c71a37da88b089316536587ed47d32405bd987a3 | refs/heads/master | 2020-03-21T11:07:24.049328 | 2018-12-25T11:59:17 | 2018-12-25T11:59:17 | 138,490,613 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# Author: hkey
import os, sys
BASE_DIR = os.path.dirname(os.getcwd())
sys.path.insert(0, BASE_DIR)
from modules import socket_server
from conf.settings import IP_PORT
if __name__ == '__main__':
    server = socket_server.socketserver.ThreadingTCPServer(IP_PORT, socket_server.MyServer)
server.serve_forever()
| [
"[email protected]"
] | |
3a2837071c1a3bfbc6361ad134e368663c3f18d1 | 0a2cc497665f2a14460577f129405f6e4f793791 | /sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/aio/operations/_backup_crr_job_details_operations.py | 33a1fb75dc9c475d6bf5fbe74a51997aee684121 | [
"MIT",
"LGPL-2.1-or-later",
"LicenseRef-scancode-generic-cla"
] | permissive | hivyas/azure-sdk-for-python | 112158aa9e1dd6e30cf6b3dde19f5db6ea2a577b | 8b3258fa45f5dc25236c22ad950e48aa4e1c181c | refs/heads/master | 2023-06-17T12:01:26.392186 | 2021-05-18T19:56:01 | 2021-05-18T19:56:01 | 313,761,277 | 1 | 1 | MIT | 2020-12-02T17:48:22 | 2020-11-17T22:42:00 | Python | UTF-8 | Python | false | false | 4,994 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class BackupCrrJobDetailsOperations:
"""BackupCrrJobDetailsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.recoveryservicesbackup.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
azure_region: str,
parameters: "_models.CrrJobRequest",
**kwargs
) -> "_models.JobResource":
"""Get CRR job details from target region.
Get CRR job details from target region.
:param azure_region: Azure region to hit Api.
:type azure_region: str
:param parameters: CRR Job request.
:type parameters: ~azure.mgmt.recoveryservicesbackup.models.CrrJobRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: JobResource, or the result of cls(response)
:rtype: ~azure.mgmt.recoveryservicesbackup.models.JobResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-20"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'azureRegion': self._serialize.url("azure_region", azure_region, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'CrrJobRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.NewErrorResponseAutoGenerated, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('JobResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.RecoveryServices/locations/{azureRegion}/backupCrrJob'} # type: ignore
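    # Usage sketch (hypothetical names; the CrrJobRequest fields shown are
    # assumptions, not confirmed by this file):
    #   job = await client.backup_crr_job_details.get(
    #       azure_region="westus",
    #       parameters=_models.CrrJobRequest(resource_id="<vault-arm-id>",
    #                                        job_name="<job-name>"),
    #   )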
| [
"[email protected]"
] | |
517106683f3f57201f69fa5b34f6128747290be9 | 63bc95150f6af526199454602e5689bfadc882ba | /02/ex2-4.py | 16afec85561b0cca064aebd54ca9ae58780d4580 | [] | no_license | veenary/python-src | fd61d22a58d452ccb251402fecb0b7babd5372a7 | d61374bc32b8ebe3b2be366a6de259680821a4e1 | refs/heads/master | 2023-03-17T10:07:27.704611 | 2021-03-10T01:31:56 | 2021-03-10T01:31:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32 | py | x = 30
print(x)
print(type(x)) | [
"[email protected]"
] |