id
stringlengths 1
8
| text
stringlengths 6
1.05M
| dataset_id
stringclasses 1
value |
---|---|---|
/CleanAdminDjango-1.5.3.1.tar.gz/CleanAdminDjango-1.5.3.1/django/contrib/localflavor/ca/forms.py
|
from __future__ import absolute_import, unicode_literals
import re
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, CharField, Select
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
# Matches North-American phone numbers such as "800-555-1234", "800.555.1234"
# or "8005551234"; an optional leading "1" (with optional dash) is accepted
# but not captured. Groups: area code, exchange, subscriber number.
phone_digits_re = re.compile(r'^(?:1-?)?(\d{3})[-\.]?(\d{3})[-\.]?(\d{4})$')
# Matches a Canadian Social Insurance Number strictly in XXX-XXX-XXX format;
# the three digit groups are captured for re-formatting and checksumming.
sin_re = re.compile(r"^(\d{3})-(\d{3})-(\d{3})$")
class CAPostalCodeField(CharField):
    """
    Canadian postal code field.

    Validates against known invalid characters: D, F, I, O, Q, U
    Additionally the first character cannot be Z or W.
    For more info see:
    http://www.canadapost.ca/tools/pg/manual/PGaddress-e.asp#1402170
    """
    default_error_messages = {
        'invalid': _('Enter a postal code in the format XXX XXX.'),
    }
    # First letter additionally excludes W and Z; later letters exclude only
    # D, F, I, O, Q, U. Any number of spaces may separate the two halves.
    postcode_regex = re.compile(r'^([ABCEGHJKLMNPRSTVXY]\d[ABCEGHJKLMNPRSTVWXYZ]) *(\d[ABCEGHJKLMNPRSTVWXYZ]\d)$')

    def clean(self, value):
        """Validate the postal code and normalize it to 'XXX XXX'.

        Returns '' for empty values; raises ValidationError otherwise.
        """
        value = super(CAPostalCodeField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        postcode = value.upper().strip()
        m = self.postcode_regex.match(postcode)
        if not m:
            # Use error_messages (not default_error_messages) so that
            # per-instance overrides passed via error_messages= are honoured,
            # consistently with the other fields in this module.
            raise ValidationError(self.error_messages['invalid'])
        return "%s %s" % (m.group(1), m.group(2))
class CAPhoneNumberField(Field):
    """Canadian phone number field."""
    default_error_messages = {
        'invalid': _('Phone numbers must be in XXX-XXX-XXXX format.'),
    }

    def clean(self, value):
        """Validate a phone number.

        Strips parentheses and whitespace, then normalizes the ten
        significant digits (with an optional leading "1") to the canonical
        XXX-XXX-XXXX form. Returns '' for empty values.
        """
        super(CAPhoneNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        # Raw string avoids invalid-escape-sequence warnings on Python 3.6+;
        # the pattern itself is byte-identical to the previous non-raw form.
        value = re.sub(r'(\(|\)|\s+)', '', smart_text(value))
        m = phone_digits_re.search(value)
        if m:
            return '%s-%s-%s' % (m.group(1), m.group(2), m.group(3))
        raise ValidationError(self.error_messages['invalid'])
class CAProvinceField(Field):
    """
    A form field that validates its input is a Canadian province name or
    abbreviation.

    It normalizes the input to the standard two-letter postal service
    abbreviation for the given province.
    """
    default_error_messages = {
        'invalid': _('Enter a Canadian province or territory.'),
    }

    def clean(self, value):
        super(CAProvinceField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        try:
            value = value.strip().lower()
        except AttributeError:
            # Non-string input (no .strip()) can never name a province;
            # fall through to the ValidationError below.
            pass
        else:
            # Load data in memory only when it is required, see also #17275
            from .ca_provinces import PROVINCES_NORMALIZED
            try:
                # `value` was already stripped and lower-cased above; no need
                # to normalize it a second time.
                return PROVINCES_NORMALIZED[value]
            except KeyError:
                pass
        raise ValidationError(self.error_messages['invalid'])
class CAProvinceSelect(Select):
    """
    A Select widget whose choices are the Canadian provinces and
    territories.
    """

    def __init__(self, attrs=None):
        # Import lazily so the choice data is only loaded when the widget is
        # actually instantiated, see also #17275.
        from .ca_provinces import PROVINCE_CHOICES
        province_choices = PROVINCE_CHOICES
        super(CAProvinceSelect, self).__init__(attrs, choices=province_choices)
class CASocialInsuranceNumberField(Field):
    """
    A Canadian Social Insurance Number (SIN).

    Checks the following rules to determine whether the number is valid:
        * Conforms to the XXX-XXX-XXX format.
        * Passes the check digit process "Luhn Algorithm"
    See: http://en.wikipedia.org/wiki/Social_Insurance_Number
    """
    default_error_messages = {
        'invalid': _('Enter a valid Canadian Social Insurance number in XXX-XXX-XXX format.'),
    }

    def clean(self, value):
        """Validate the SIN format and checksum; return 'XXX-XXX-XXX' or ''."""
        super(CASocialInsuranceNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        # Call .match() on the pre-compiled pattern directly rather than
        # routing through re.match(pattern, ...).
        match = sin_re.match(value)
        if not match:
            raise ValidationError(self.error_messages['invalid'])
        number = '%s-%s-%s' % (match.group(1), match.group(2), match.group(3))
        check_number = '%s%s%s' % (match.group(1), match.group(2), match.group(3))
        if not self.luhn_checksum_is_valid(check_number):
            raise ValidationError(self.error_messages['invalid'])
        return number

    def luhn_checksum_is_valid(self, number):
        """
        Checks to make sure that the SIN passes a luhn mod-10 checksum
        See: http://en.wikipedia.org/wiki/Luhn_algorithm
        """
        total = 0  # renamed from `sum` to avoid shadowing the builtin
        num_digits = len(number)
        oddeven = num_digits & 1
        for count in range(num_digits):
            digit = int(number[count])
            # Double every second digit, counting from the right.
            if not ((count & 1) ^ oddeven):
                digit = digit * 2
            if digit > 9:
                digit = digit - 9
            total = total + digit
        return (total % 10) == 0
|
PypiClean
|
/django-helpdesk3000-0.3.4.tar.gz/django-helpdesk3000-0.3.4/helpdesk/south_migrations/0001_initial.py
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the helpdesk app.

    ``forwards`` creates every helpdesk table (Queue, Ticket, FollowUp,
    TicketChange, Attachment, PreSetReply, EscalationExclusion,
    EmailTemplate, KBCategory, KBItem, SavedSearch, UserSettings,
    IgnoreEmail, TicketCC, CustomField, TicketCustomFieldValue) plus the
    M2M join tables; ``backwards`` drops them all in reverse-dependency
    order. The frozen ``models`` dict mirrors the model state at the time
    the migration was generated and must not be edited retroactively.
    """

    def forwards(self, orm):
        # Adding model 'Queue'
        db.create_table('helpdesk_queue', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('slug', self.gf('django.db.models.fields.SlugField')(max_length=50, db_index=True)),
            ('email_address', self.gf('django.db.models.fields.EmailField')(max_length=75, null=True, blank=True)),
            ('locale', self.gf('django.db.models.fields.CharField')(max_length=10, null=True, blank=True)),
            ('allow_public_submission', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('allow_email_submission', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('escalate_days', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('new_ticket_cc', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
            ('updated_ticket_cc', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
            ('email_box_type', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
            ('email_box_host', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
            ('email_box_port', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('email_box_ssl', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('email_box_user', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
            ('email_box_pass', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
            ('email_box_imap_folder', self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True)),
            # NOTE(review): default='5' is a string where an int is expected
            # by IntegerField — frozen as generated; fix in a later migration.
            ('email_box_interval', self.gf('django.db.models.fields.IntegerField')(default='5', null=True, blank=True)),
            ('email_box_last_check', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
        ))
        db.send_create_signal('helpdesk', ['Queue'])
        # Adding model 'Ticket'
        db.create_table('helpdesk_ticket', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=200)),
            ('queue', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['helpdesk.Queue'])),
            ('created', self.gf('django.db.models.fields.DateTimeField')(blank=True)),
            ('modified', self.gf('django.db.models.fields.DateTimeField')(blank=True)),
            ('submitter_email', self.gf('django.db.models.fields.EmailField')(max_length=75, null=True, blank=True)),
            ('assigned_to', self.gf('django.db.models.fields.related.ForeignKey')(related_name='assigned_to', blank=True, null=True, to=orm['auth.User'])),
            ('status', self.gf('django.db.models.fields.IntegerField')(default=1)),
            ('on_hold', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('description', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('resolution', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            # NOTE(review): blank=3 looks like a copy of default=3; `blank`
            # should be a bool. Frozen as generated — do not edit in place.
            ('priority', self.gf('django.db.models.fields.IntegerField')(default=3, blank=3)),
            ('last_escalation', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
        ))
        db.send_create_signal('helpdesk', ['Ticket'])
        # Adding model 'FollowUp'
        db.create_table('helpdesk_followup', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('ticket', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['helpdesk.Ticket'])),
            # NOTE(review): default is the datetime at migration-generation
            # time, not datetime.now — a known South freezing artifact.
            ('date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2011, 4, 27, 15, 17, 4, 272904))),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
            ('comment', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('public', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
            ('new_status', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
        ))
        db.send_create_signal('helpdesk', ['FollowUp'])
        # Adding model 'TicketChange'
        db.create_table('helpdesk_ticketchange', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('followup', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['helpdesk.FollowUp'])),
            ('field', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('old_value', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('new_value', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
        ))
        db.send_create_signal('helpdesk', ['TicketChange'])
        # Adding model 'Attachment'
        db.create_table('helpdesk_attachment', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('followup', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['helpdesk.FollowUp'])),
            ('file', self.gf('django.db.models.fields.files.FileField')(max_length=100)),
            ('filename', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('mime_type', self.gf('django.db.models.fields.CharField')(max_length=30)),
            ('size', self.gf('django.db.models.fields.IntegerField')()),
        ))
        db.send_create_signal('helpdesk', ['Attachment'])
        # Adding model 'PreSetReply'
        db.create_table('helpdesk_presetreply', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('body', self.gf('django.db.models.fields.TextField')()),
        ))
        db.send_create_signal('helpdesk', ['PreSetReply'])
        # Adding M2M table for field queues on 'PreSetReply'
        db.create_table('helpdesk_presetreply_queues', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('presetreply', models.ForeignKey(orm['helpdesk.presetreply'], null=False)),
            ('queue', models.ForeignKey(orm['helpdesk.queue'], null=False))
        ))
        db.create_unique('helpdesk_presetreply_queues', ['presetreply_id', 'queue_id'])
        # Adding model 'EscalationExclusion'
        db.create_table('helpdesk_escalationexclusion', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('date', self.gf('django.db.models.fields.DateField')()),
        ))
        db.send_create_signal('helpdesk', ['EscalationExclusion'])
        # Adding M2M table for field queues on 'EscalationExclusion'
        db.create_table('helpdesk_escalationexclusion_queues', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('escalationexclusion', models.ForeignKey(orm['helpdesk.escalationexclusion'], null=False)),
            ('queue', models.ForeignKey(orm['helpdesk.queue'], null=False))
        ))
        db.create_unique('helpdesk_escalationexclusion_queues', ['escalationexclusion_id', 'queue_id'])
        # Adding model 'EmailTemplate'
        db.create_table('helpdesk_emailtemplate', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('template_name', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('subject', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('heading', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('plain_text', self.gf('django.db.models.fields.TextField')()),
            ('html', self.gf('django.db.models.fields.TextField')()),
            ('locale', self.gf('django.db.models.fields.CharField')(max_length=10, null=True, blank=True)),
        ))
        db.send_create_signal('helpdesk', ['EmailTemplate'])
        # Adding model 'KBCategory'
        db.create_table('helpdesk_kbcategory', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('slug', self.gf('django.db.models.fields.SlugField')(max_length=50, db_index=True)),
            ('description', self.gf('django.db.models.fields.TextField')()),
        ))
        db.send_create_signal('helpdesk', ['KBCategory'])
        # Adding model 'KBItem'
        db.create_table('helpdesk_kbitem', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['helpdesk.KBCategory'])),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('question', self.gf('django.db.models.fields.TextField')()),
            ('answer', self.gf('django.db.models.fields.TextField')()),
            ('votes', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('recommendations', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('last_updated', self.gf('django.db.models.fields.DateTimeField')(blank=True)),
        ))
        db.send_create_signal('helpdesk', ['KBItem'])
        # Adding model 'SavedSearch'
        db.create_table('helpdesk_savedsearch', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('shared', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('query', self.gf('django.db.models.fields.TextField')()),
        ))
        db.send_create_signal('helpdesk', ['SavedSearch'])
        # Adding model 'UserSettings'
        db.create_table('helpdesk_usersettings', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['auth.User'], unique=True)),
            ('settings_pickled', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
        ))
        db.send_create_signal('helpdesk', ['UserSettings'])
        # Adding model 'IgnoreEmail'
        db.create_table('helpdesk_ignoreemail', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('date', self.gf('django.db.models.fields.DateField')(blank=True)),
            ('email_address', self.gf('django.db.models.fields.CharField')(max_length=150)),
            ('keep_in_mailbox', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ))
        db.send_create_signal('helpdesk', ['IgnoreEmail'])
        # Adding M2M table for field queues on 'IgnoreEmail'
        db.create_table('helpdesk_ignoreemail_queues', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('ignoreemail', models.ForeignKey(orm['helpdesk.ignoreemail'], null=False)),
            ('queue', models.ForeignKey(orm['helpdesk.queue'], null=False))
        ))
        db.create_unique('helpdesk_ignoreemail_queues', ['ignoreemail_id', 'queue_id'])
        # Adding model 'TicketCC'
        db.create_table('helpdesk_ticketcc', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('ticket', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['helpdesk.Ticket'])),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=75, null=True, blank=True)),
            ('can_view', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('can_update', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ))
        db.send_create_signal('helpdesk', ['TicketCC'])
        # Adding model 'CustomField'
        db.create_table('helpdesk_customfield', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.SlugField')(max_length=50, unique=True, db_index=True)),
            # NOTE(review): max_length='30' is a string where an int is
            # expected — frozen as generated; fix in a later migration.
            ('label', self.gf('django.db.models.fields.CharField')(max_length='30')),
            ('help_text', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('data_type', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('max_length', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('decimal_places', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('list_values', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('required', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('staff_only', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ))
        db.send_create_signal('helpdesk', ['CustomField'])
        # Adding model 'TicketCustomFieldValue'
        db.create_table('helpdesk_ticketcustomfieldvalue', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('ticket', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['helpdesk.Ticket'])),
            ('field', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['helpdesk.CustomField'])),
            ('value', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
        ))
        db.send_create_signal('helpdesk', ['TicketCustomFieldValue'])
        # Adding unique constraint on 'TicketCustomFieldValue', fields ['ticket', 'field']
        db.create_unique('helpdesk_ticketcustomfieldvalue', ['ticket_id', 'field_id'])

    def backwards(self, orm):
        # Removing unique constraint on 'TicketCustomFieldValue', fields ['ticket', 'field']
        db.delete_unique('helpdesk_ticketcustomfieldvalue', ['ticket_id', 'field_id'])
        # Deleting model 'Queue'
        db.delete_table('helpdesk_queue')
        # Deleting model 'Ticket'
        db.delete_table('helpdesk_ticket')
        # Deleting model 'FollowUp'
        db.delete_table('helpdesk_followup')
        # Deleting model 'TicketChange'
        db.delete_table('helpdesk_ticketchange')
        # Deleting model 'Attachment'
        db.delete_table('helpdesk_attachment')
        # Deleting model 'PreSetReply'
        db.delete_table('helpdesk_presetreply')
        # Removing M2M table for field queues on 'PreSetReply'
        db.delete_table('helpdesk_presetreply_queues')
        # Deleting model 'EscalationExclusion'
        db.delete_table('helpdesk_escalationexclusion')
        # Removing M2M table for field queues on 'EscalationExclusion'
        db.delete_table('helpdesk_escalationexclusion_queues')
        # Deleting model 'EmailTemplate'
        db.delete_table('helpdesk_emailtemplate')
        # Deleting model 'KBCategory'
        db.delete_table('helpdesk_kbcategory')
        # Deleting model 'KBItem'
        db.delete_table('helpdesk_kbitem')
        # Deleting model 'SavedSearch'
        db.delete_table('helpdesk_savedsearch')
        # Deleting model 'UserSettings'
        db.delete_table('helpdesk_usersettings')
        # Deleting model 'IgnoreEmail'
        db.delete_table('helpdesk_ignoreemail')
        # Removing M2M table for field queues on 'IgnoreEmail'
        db.delete_table('helpdesk_ignoreemail_queues')
        # Deleting model 'TicketCC'
        db.delete_table('helpdesk_ticketcc')
        # Deleting model 'CustomField'
        db.delete_table('helpdesk_customfield')
        # Deleting model 'TicketCustomFieldValue'
        db.delete_table('helpdesk_ticketcustomfieldvalue')

    # Frozen ORM state: South re-creates these model definitions to build the
    # `orm` object passed to forwards()/backwards(). Values are strings on
    # purpose (South evaluates them); do not edit retroactively.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'unique': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'helpdesk.attachment': {
            'Meta': {'ordering': "['filename']", 'object_name': 'Attachment'},
            'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'filename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'followup': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helpdesk.FollowUp']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'mime_type': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            'size': ('django.db.models.fields.IntegerField', [], {})
        },
        'helpdesk.customfield': {
            'Meta': {'object_name': 'CustomField'},
            'data_type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'decimal_places': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'help_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            # NOTE(review): "'30'" freezes the string max_length bug above.
            'label': ('django.db.models.fields.CharField', [], {'max_length': "'30'"}),
            'list_values': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'max_length': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'unique': 'True', 'db_index': 'True'}),
            'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'staff_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'helpdesk.emailtemplate': {
            'Meta': {'ordering': "['template_name', 'locale']", 'object_name': 'EmailTemplate'},
            'heading': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'html': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'locale': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'plain_text': ('django.db.models.fields.TextField', [], {}),
            'subject': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'template_name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'helpdesk.escalationexclusion': {
            'Meta': {'object_name': 'EscalationExclusion'},
            'date': ('django.db.models.fields.DateField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'queues': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['helpdesk.Queue']", 'symmetrical': 'False', 'null': 'True', 'blank': 'True'})
        },
        'helpdesk.followup': {
            'Meta': {'ordering': "['date']", 'object_name': 'FollowUp'},
            'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2011, 4, 27, 15, 17, 4, 272904)'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'new_status': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'ticket': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helpdesk.Ticket']"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
        },
        'helpdesk.ignoreemail': {
            'Meta': {'object_name': 'IgnoreEmail'},
            'date': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
            'email_address': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'keep_in_mailbox': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'queues': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['helpdesk.Queue']", 'symmetrical': 'False', 'null': 'True', 'blank': 'True'})
        },
        'helpdesk.kbcategory': {
            'Meta': {'ordering': "['title']", 'object_name': 'KBCategory'},
            'description': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'helpdesk.kbitem': {
            'Meta': {'ordering': "['title']", 'object_name': 'KBItem'},
            'answer': ('django.db.models.fields.TextField', [], {}),
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helpdesk.KBCategory']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_updated': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
            'question': ('django.db.models.fields.TextField', [], {}),
            'recommendations': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        'helpdesk.presetreply': {
            'Meta': {'ordering': "['name']", 'object_name': 'PreSetReply'},
            'body': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'queues': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['helpdesk.Queue']", 'symmetrical': 'False', 'null': 'True', 'blank': 'True'})
        },
        'helpdesk.queue': {
            'Meta': {'ordering': "('title',)", 'object_name': 'Queue'},
            'allow_email_submission': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'allow_public_submission': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'email_box_host': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'email_box_imap_folder': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'email_box_interval': ('django.db.models.fields.IntegerField', [], {'default': "'5'", 'null': 'True', 'blank': 'True'}),
            'email_box_last_check': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'email_box_pass': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'email_box_port': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'email_box_ssl': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'email_box_type': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
            'email_box_user': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'escalate_days': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'locale': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'new_ticket_cc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'updated_ticket_cc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
        },
        'helpdesk.savedsearch': {
            'Meta': {'object_name': 'SavedSearch'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'query': ('django.db.models.fields.TextField', [], {}),
            'shared': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'helpdesk.ticket': {
            'Meta': {'object_name': 'Ticket'},
            'assigned_to': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'assigned_to'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_escalation': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
            'on_hold': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            # NOTE(review): "'blank': '3'" freezes the blank=3 bug above.
            'priority': ('django.db.models.fields.IntegerField', [], {'default': '3', 'blank': '3'}),
            'queue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helpdesk.Queue']"}),
            'resolution': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'submitter_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        'helpdesk.ticketcc': {
            'Meta': {'object_name': 'TicketCC'},
            'can_update': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ticket': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helpdesk.Ticket']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
        },
        'helpdesk.ticketchange': {
            'Meta': {'object_name': 'TicketChange'},
            'field': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'followup': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helpdesk.FollowUp']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'new_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'old_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        'helpdesk.ticketcustomfieldvalue': {
            'Meta': {'unique_together': "(('ticket', 'field'),)", 'object_name': 'TicketCustomFieldValue'},
            'field': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helpdesk.CustomField']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ticket': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['helpdesk.Ticket']"}),
            'value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        'helpdesk.usersettings': {
            'Meta': {'object_name': 'UserSettings'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'settings_pickled': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
        }
    }

    # Apps whose frozen model definitions are complete in `models` above.
    complete_apps = ['helpdesk']
|
PypiClean
|
/chaipy-0.4.0-py3-none-any.whl/chai_py/deployment.py
|
from pathlib import Path
from typing import AnyStr, Optional
import time
from halo import Halo
from requests import HTTPError
from typing_extensions import TypedDict
import requests
import segno
from chai_py.auth import get_auth
from chai_py.cloud_logs import display_logs, get_logs
from chai_py.defaults import ENDPOINT
from chai_py.notebook_utils import IS_NOTEBOOK, show_qr
def upload_and_deploy(package: AnyStr, bot_uid: Optional[str] = None) -> str:
    """Uploads the given archive, triggering deployment of the chatbot.

    :param package: Path to the packaged chatbot zip.
    :param bot_uid: Used to modify an existing bot: the UID of the
        previously-deployed bot. When omitted, a ``_deployment`` sidecar
        file left by a previous run is offered interactively.
    :return bot_uid: The UID of the deployed bot.
    """
    package = Path(package)
    # The UID of the last deployment from this directory is cached in a
    # sidecar file so re-deploys can update the same bot.
    deployment_file = package.parent / "_deployment"
    if deployment_file.exists() and bot_uid is None:
        with deployment_file.open("r") as f:
            previous_bot_uid = f.read().strip()
        print("Detected previous deployment from this location. Use the same bot UID as before?")
        print(f"  [y] (default) Yes. Update the bot ({previous_bot_uid}).")
        print(f"  [n] No. Deploy as a new bot.")
        input_key = input().lower()
        if input_key == "y" or input_key == "":
            bot_uid = previous_bot_uid
            print(f"Using previous bot UID: {bot_uid}")
        elif input_key == "n":
            pass
        else:
            raise RuntimeError("Unknown input.")
    auth = get_auth()
    # FIX: initialise r so the except-branch below cannot hit a NameError
    # when requests.post() itself raises (that NameError previously masked
    # the real failure).
    r = None
    try:
        r = requests.post(
            ENDPOINT,
            params={'uid': auth.uid, 'key': auth.key, 'bot_uid': bot_uid}
        )
        try:
            r.raise_for_status()
        except Exception as http_err:
            # Surface the server-provided error body.
            raise RuntimeError(r.json()) from http_err
    except Exception as err:
        if r is not None:
            print(r.content, r.reason)
        raise RuntimeError("Failed to retrieve signed URL.") from err
    url = r.text
    if bot_uid is None:
        print("Creating new bot.")
        bot_uid = parse_signed_url_for_bot_uid(url)
        print(f"Received bot UID: {bot_uid}")
    # Remember the UID for the next deployment from this directory.
    with deployment_file.open("w") as f:
        f.write(bot_uid)
    with package.open("rb") as f:
        r = requests.put(url, data=f, headers={'content-type': 'application/zip'})
        r.raise_for_status()
    print(f"Successfully uploaded {package}.")
    return bot_uid
def parse_signed_url_for_bot_uid(url: str):
    """Parses a bot UID from a signed URL.

    Assumes the signed URL follows the following scheme:
        [...]/{bot_uid}.zip?[...]

    :param url: Signed upload URL.
    :return: The bot UID embedded in the URL's file name.
    """
    # Drop the query string, keep the path portion.
    path_part, _, _ = url.partition("?")
    # Last path segment is the archive file name.
    archive_name = path_part.rsplit("/", 1)[-1]
    # Everything before the first dot is the UID.
    return archive_name.split(".", 1)[0]
def wait_for_deployment(bot_uid: str, sleep: float = 3):
    """Waits for deployment of the bot to complete.

    Polls get_bot_status() with a spinner, printing each completed
    pipeline step, and returns once an active or failed deployment
    *newer* than any pre-existing one is reported.

    :param bot_uid: UID of the bot being deployed.
    :param sleep: Polling interval in seconds (clamped to at least 1).
    :return:
    """
    MIN_SLEEP = 1
    sleep = max(MIN_SLEEP, sleep)
    # Ordered pipeline states reported by the status endpoint.
    BOT_DEPLOYMENT_PROCESS = [
        "signed_url_created",
        "processing_upload",
        "deploying",
        "initialized",
    ]
    current_deployment_process = -1  # Index for BOT_DEPLOYMENT_PROCESS
    completed_processes = []
    existing_status = None
    # Snapshot any previous deployment so stale status reports (from before
    # this upload) can be ignored inside the polling loop below.
    try:
        _existing_status = get_bot_status(bot_uid)
        if "activeDeployment" in _existing_status:
            existing_status = _existing_status
            print(f"Found previous deployment: Version {existing_status['activeDeployment']['version']}")
        elif "failedDeployment" in _existing_status:
            existing_status = _existing_status
            print(f"Found previous failed deployment: Version {existing_status['failedDeployment']['version']}")
    except HTTPError as e:
        # A 404 simply means this bot has never been deployed before.
        if e.response.status_code != 404:
            raise e
        else:
            print("Did not find previous deployment.")
    MAXIMUM_ERROR_RETRIES = 10
    error_retries = 0
    start_time = time.time()
    with Halo(text="Polling for progress...") as spinner:
        spinner.start()
        time.sleep(5)  # Initial wait to avoid bot status error.
        while True:
            try:
                status = get_bot_status(bot_uid)
            except Exception as e:
                spinner.warn(f"Error getting bot status (UID: {bot_uid}): {e}")
                error_retries += 1
                if error_retries > MAXIMUM_ERROR_RETRIES:
                    spinner.fail()
                    print(f"Hit retry-on-error limit ({MAXIMUM_ERROR_RETRIES}).")
                    break
                # NOTE(review): this continue skips the time.sleep() at the
                # bottom of the loop, so consecutive errors retry in a tight
                # loop -- confirm whether a delay/backoff was intended.
                continue
            if existing_status is not None:
                # Check if new timestamp is later than existing timestamp
                existing_deployment = existing_status['activeDeployment'] if 'activeDeployment' in existing_status else existing_status['failedDeployment']
                if status['timestamp'] <= existing_deployment['timestamp']:
                    # Do not parse old version
                    # NOTE(review): also skips the sleep below (tight loop).
                    continue
            status_str = status['status']
            if status_str not in BOT_DEPLOYMENT_PROCESS:
                raise ValueError(f"Unknown status: {status_str}.")
            new_current_deployment_process = BOT_DEPLOYMENT_PROCESS.index(status_str)
            if new_current_deployment_process != current_deployment_process:
                # Completed new step(s): report every not-yet-reported step
                # up to and including the current one.
                for step in BOT_DEPLOYMENT_PROCESS[:new_current_deployment_process + 1]:
                    if step in completed_processes:
                        continue
                    spinner.succeed(step)
                    completed_processes.append(step)
                current_deployment_process = new_current_deployment_process
                if current_deployment_process + 1 < len(BOT_DEPLOYMENT_PROCESS):
                    # If next step exists, set spinner to next step
                    spinner.start(
                        f"Waiting for next step: {BOT_DEPLOYMENT_PROCESS[current_deployment_process + 1]}"
                    )
                else:
                    # Next step does not exist; wait for final deployment confirmation
                    spinner.start("Waiting for active_deployment confirmation...")
            if 'activeDeployment' in status:
                new_active_deployment = status['activeDeployment']
                # Only report a deployment newer than the pre-existing one.
                if existing_status is None \
                        or ('activeDeployment' in existing_status
                            and new_active_deployment['version'] > existing_status['activeDeployment']['version']):
                    spinner.succeed("active_deployment")
                    print(f"New active deployment: {new_active_deployment}")
                    break
            if 'failedDeployment' in status:
                new_failed_deployment = status['failedDeployment']
                if existing_status is None \
                        or ('failedDeployment' in existing_status
                            and new_failed_deployment['version'] > existing_status['failedDeployment']['version']):
                    spinner.fail("failed_deployment")
                    # Show only error-level logs to explain the failure.
                    display_logs(
                        get_logs(
                            bot_uid=bot_uid,
                            errors=True
                        )
                    )
                    print("Full logs can be checked with the display_logs and get_logs functions.")
                    break
            elapsed_time = time.time() - start_time
            if elapsed_time > 2 * 60:
                print("This deployment is taking an unexpectedly long time.")
            time.sleep(sleep)
class Deployment(TypedDict):
    """Metadata for a single bot deployment as returned by the status endpoint."""
    # Server-reported timestamp; compared ordinally in wait_for_deployment().
    timestamp: int
    # Deployment version number; newer deployments carry greater versions.
    version: int
class _BotStatusRequired(TypedDict):
    """Keys always present in a bot-status response."""
    status: str
    timestamp: int


class BotStatus(_BotStatusRequired, total=False):
    """Bot status payload returned by ``get_bot_status``.

    ``activeDeployment`` and ``failedDeployment`` may be absent entirely
    (callers probe with ``'activeDeployment' in status``), so they are
    declared in a non-total section instead of as ``Optional`` values,
    which would wrongly claim the keys are always present.
    """
    activeDeployment: Deployment
    failedDeployment: Deployment
def get_bot_status(bot_uid: str) -> BotStatus:
    """Gets the status of the bot.

    :param bot_uid: UID of the bot to query.
    :return: Parsed JSON status payload.
    :raises requests.HTTPError: if the server responds with an error code
        (e.g. 404 for an unknown bot) -- callers such as
        wait_for_deployment() inspect ``e.response.status_code``.
    :raises RuntimeError: if the request could not be made at all.
    """
    auth = get_auth()
    try:
        req = requests.get(
            url=ENDPOINT,
            params={
                'uid': auth.uid,
                'key': auth.key,
                'bot_uid': bot_uid,
                'item': 'status'
            }
        )
    except Exception as err:
        # Network-level failure: no response was received at all.
        raise RuntimeError(f"Failed to retrieve status for bot {bot_uid}.") from err
    # FIX: let HTTPError propagate instead of wrapping it in a generic
    # RuntimeError; previously wait_for_deployment()'s 404 handling was
    # unreachable because every HTTP error was swallowed here.
    req.raise_for_status()
    return req.json()
def share_bot(bot_uid: str) -> str:
    """Displays the url, a QR code, along with additional guidance.

    Renders the QR code inline (notebook) or in the terminal, then loops
    over a small action menu until the user enters a key that is not on
    the menu.

    :param bot_uid: UID of the deployed bot.
    :return: The url for the bot.
    """
    url = f"chai://chai.ml/{bot_uid}"
    qr = segno.make_qr(url)
    print("Scan the QR code with your phone to start a chat in the app!")
    print(f"Or check it out at {url}")
    if IS_NOTEBOOK:
        show_qr(qr)
    else:
        qr.terminal()

    def save():
        # Empty input falls back to the default file name.
        destination = input("Save QR code to: (Press [Enter] for default: 'qr.png') ")
        destination = destination or "qr.png"
        qr.save(destination, scale=10)
        print(f"Saved QR code to {destination}.")

    def open_():
        qr.show(scale=10)

    # Menu maps hotkey -> (callback, human-readable description).
    menu = {"s": (save, "Save this QR code to an image file.")}
    if not IS_NOTEBOOK:
        menu["o"] = (open_, "Open QR code in external viewer.")
    while True:
        print("\nEnter one of the following keys to perform additional actions (or [Enter] to exit):")
        for hotkey, (_, description) in menu.items():
            print(f"  [{hotkey}] {description}")
        choice = input().lower()
        if choice not in menu:
            print("Exiting.")
            break
        menu[choice][0]()
    return url
def delete_bot(bot_uid: str) -> str:
    """
    Uses an HTTPS request to trigger deletion of bot with specified UID.

    :param bot_uid: UID of the bot to delete.
    :return: The UID of the deleted bot.
    """
    auth = get_auth()
    try:
        req = requests.delete(
            url=ENDPOINT,
            params={'uid': auth.uid, 'key': auth.key, 'bot_uid': bot_uid}
        )
        try:
            req.raise_for_status()
        except Exception as http_err:
            # Surface the server-provided error body.
            raise RuntimeError(req.json()) from http_err
    except Exception as exc:
        # FIX: chain the original exception so the cause is not lost.
        raise RuntimeError(f"Failed to delete bot {bot_uid}.") from exc
    print(f"Successfully deleted {bot_uid}.")
    return bot_uid
# Public alias for share_bot under a more descriptive name.
advertise_deployed_bot = share_bot
|
PypiClean
|
/kurby-0.3.3.tar.gz/kurby-0.3.3/README.md
|
# Kurby
[](https://docs.python.org/3/whatsnew/3.7.html) [](https://pepy.tech/badge/kurby) [](https://badge.fury.io/py/kurby.svg)
<div align="center">
<img src="docs/kurby.png" alt="Kirby ball in Kirby: Canvas Curse" height=400, width=400/>
</div>
<br>
Kurby is a nice and simple CLI that uses the [Twist](https://twist.moe) website and its huge collection to download animes for free and **automatically**
Animes from Twist are generally in High Definition with English subtitles. Please consider [donating](https://twist.moe) for their amazing work :)
## Overview
The CLI is built with [Typer](https://github.com/tiangolo/typer) and it is composed of 3 commands
- `animes`: Search animes to find what is available and extract the slug to be used in the other commands

> `--search` option allows you to make a fuzzy search
- `details`: Show details of an anime if needed

> You can see the number of episodes available and when the first and last episode were uploaded
- `download`: Choose and download which episodes of the anime you want !

> Because sometimes bad things can happen, Kurby will automatically retry for you
You can also download without having a slug initially. In this case, Kurby will propose you a selection

#### And that's it !
You can also use `kurby --help` to get more information on the different commands
## Installation
```bash
pip install kurby
kurby --help
```
## Installation on Windows
- Right click on the `install.bat` and run it as an **Administrator**, this will install [Chocolatey](https://chocolatey.org/) who will manage the python installation
- Once the installation is finished, and you're asked to press a key, open a new terminal (`Win + R` > `cmd` )
- You can now start using Kurby with `kurby --help`
## Installation on Android without root needed
- Install [Termux](https://play.google.com/store/apps/details?id=com.termux) on your Android phone
- In Termux, run this command to allow access to storage: `termux-setup-storage`, and tap allow
- Next, run the follow command `pkg install git python`
- Then `pip3 install kurby`
- You're done ! You can download animes on your phone like so `kurby download --d ~/storage/shared`
##### *Thanks to [6b86b3ac03c167320d93](https://www.reddit.com/user/6b86b3ac03c167320d93/) for this tutorial*
## Installation from sources
### Create your virtual environment (optional)
````bash
mkvirtualenv kurby
workon kurby
````
### Install poetry
```bash
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python -
```
### Install dependencies using poetry
```bash
poetry install --no-dev
kurby-cli --help # or python kurby-cli --help
```
## Getting the latest episode automatically
An interesting use case is to get the latest episode of an anime as soon as it is available.
This is possible on Linux using `crontab` (or another equivalent on others OS) and _a little tweak of chemistry_.
Here is an example of a few instructions that can help you do this.
```shell
now=$(date -u "+%Y-%m-%dT%H:%M:%S")
date=$(cat kurby.date || echo $now) # Get the date of previous download
python kurby download {{YOUR_ANIME}} --dfrom=${date} # Download any episodes that have been uploaded since the last time
echo $now > kurby.date # Store the current date as the new date
```
## Next steps
Kurby is already functional as it is but here are the next things I would like to add :
- Adding the support of asynchronous download
- Refactor the retrying strategy to add more customisable options and allow errors during a download without interruption
- Refactor the crawling process to potentially avoid being detected as a bot
### Disclaimer
Downloading copyright videos may be illegal in your country.
This tool is for educational purposes only.
The developers or this application do not store or distribute any files whatsoever.
|
PypiClean
|
/suds3-0.1.tar.gz/suds3-0.1/suds/xsd/deplist.py
|
from logging import getLogger
from suds import *
log = getLogger(__name__)
class DepList:
    """
    Dependancy solving list.
    Items are tuples: (object, (deps,))
    @ivar unsorted: The raw (unsorted) items.
    @type unsorted: list
    @ivar index: Mapping of item key -> item, used to resolve dependencies.
    @type index: dict
    @ivar stack: The sorting stack.
    @type stack: list
    @ivar pushed: The I{pushed} set tracks items that have been
        processed.
    @type pushed: set
    @ivar sorted: The sorted list of items.
    @type sorted: list
    """

    def __init__(self):
        """Initialise an empty dependency list."""
        self.unsorted = []
        self.index = {}
        self.stack = []
        self.pushed = set()
        self.sorted = None

    def add(self, *items):
        """
        Add items to be sorted.
        @param items: One or more items to be added.
        @type items: I{item}
        @return: self
        @rtype: L{DepList}
        """
        for item in items:
            self.unsorted.append(item)
            key = item[0]
            self.index[key] = item
        return self

    def sort(self):
        """
        Sort the list based on dependancies.

        Iterative depth-first post-order traversal: each item is emitted
        only after all of its resolvable dependencies.
        @return: The sorted items.
        @rtype: list
        """
        self.sorted = list()
        self.pushed = set()
        for item in self.unsorted:
            popped = []
            self.push(item)
            while len(self.stack):
                try:
                    top = self.top()
                    # FIX: use the Python 3 iterator protocol; the original
                    # top[1].next() call only worked on Python 2.
                    ref = next(top[1])
                    refd = self.index.get(ref)
                    if refd is None:
                        # Unknown dependency: log and skip it.
                        log.debug('"%s" not found, skipped', Repr(ref))
                        continue
                    self.push(refd)
                except StopIteration:
                    # Current frame has no more dependencies: emit it.
                    popped.append(self.pop())
                    continue
            for p in popped:
                self.sorted.append(p)
        self.unsorted = self.sorted
        return self.sorted

    def top(self):
        """
        Get the item at the top of the stack.
        @return: The top item.
        @rtype: (item, iter)
        """
        return self.stack[-1]

    def push(self, item):
        """
        Push an item onto the sorting stack.

        No-op when the item has already been pushed (cycle/duplicate guard).
        @param item: An item to push.
        @type item: I{item}
        """
        if item in self.pushed:
            return
        frame = (item, iter(item[1]))
        self.stack.append(frame)
        self.pushed.add(item)

    def pop(self):
        """
        Pop the top item off the stack.
        @return: The popped item, or None when the stack is empty.
        @rtype: I{item}
        """
        try:
            frame = self.stack.pop()
            return frame[0]
        except IndexError:
            # FIX: catch only IndexError instead of a bare ``except``.
            return None
if __name__ == '__main__':
    # Ad-hoc demo: items are (key, (dependency-keys,)) tuples; the sorted
    # output should list every item after all of its dependencies.
    a = ('a', ('x',))
    b = ('b', ('a',))
    c = ('c', ('a','b'))
    d = ('d', ('c',))
    e = ('e', ('d','a'))
    f = ('f', ('e','c','d','a'))
    x = ('x', ())
    L = DepList()
    L.add(c, e, d, b, f, a, x)
    print([x[0] for x in L.sort()])
|
PypiClean
|
/acconeer_exptool-7.4.0-py3-none-any.whl/acconeer/exptool/a111/algo/obstacle_detection/ui.py
|
import logging
import numpy as np
from numpy import unravel_index
from PySide6 import QtCore, QtGui
import pyqtgraph as pg
import acconeer.exptool as et
from .constants import WAVELENGTH
log = logging.getLogger("acconeer.exptool.examples.obstacle_detection")
class PGUpdater:
    """PyQtGraph plot updater for the obstacle-detection example.

    Owns all plot items (envelope, obstacle range-Doppler map, optional
    background/threshold maps and per-peak history plots) and refreshes
    them from processed sensor data in update().
    """

    def __init__(self, sensor_config, processing_config, session_info):
        # session_info is accepted for API symmetry but not used here.
        self.sensor_config = sensor_config
        self.map_max = 0
        self.width = 3
        self.max_velocity = WAVELENGTH / 4 * self.sensor_config.update_rate  # cm/s
        self.peak_hist_len = processing_config.peak_hist
        self.dist_index = processing_config.downsampling
        self.nr_locals = processing_config.nr_peaks
        self.downsampling = processing_config.downsampling
        self.threshold = processing_config.static_threshold
        self.sensor_separation = processing_config.sensor_separation
        # Latched copies of the last received background / threshold maps.
        self.fft_bg_data = None
        self.threshold_data = None
        # Maps history kind -> [list of plot curves, enabled flag].
        self.hist_plots = {
            "velocity": [[], processing_config.velocity_history],
            "angle": [[], processing_config.angle_history],
            "distance": [[], processing_config.distance_history],
            "amplitude": [[], processing_config.amplitude_history],
        }
        self.num_hist_plots = 0
        # NOTE(review): iterating a dict yields its KEYS, so ``hist`` is a
        # string and ``hist[1]`` is the key's second character (always
        # truthy) -- num_hist_plots therefore always ends up as 4.
        # Presumably ``self.hist_plots.values()`` and the enabled flag were
        # intended; confirm before changing.
        for hist in self.hist_plots:
            if hist[1]:
                self.num_hist_plots += 1
        self.advanced_plots = {
            "background_map": processing_config.background_map,
            "threshold_map": processing_config.threshold_map,
            "show_line_outs": processing_config.show_line_outs,
        }

    def setup(self, win):
        """Build all plot widgets inside the given pyqtgraph window."""
        win.setWindowTitle("Acconeer obstacle detection example")
        row_idx = 0
        # --- Envelope plot (top row) ---
        self.env_ax = win.addPlot(row=row_idx, col=0, colspan=4, title="Envelope and max FFT")
        self.env_ax.setMenuEnabled(False)
        self.env_ax.setMouseEnabled(x=False, y=False)
        self.env_ax.hideButtons()
        self.env_ax.setLabel("bottom", "Depth (cm)")
        self.env_ax.setXRange(*(self.sensor_config.range_interval * 100))
        self.env_ax.showGrid(True, True)
        self.env_ax.addLegend(offset=(-10, 10))
        self.env_ax.setYRange(0, 0.1)
        self.env_ampl = self.env_ax.plot(pen=et.utils.pg_pen_cycler(0), name="Envelope")
        self.fft_max = self.env_ax.plot(pen=et.utils.pg_pen_cycler(1, "--"), name="FFT @ max")
        if self.advanced_plots["show_line_outs"]:
            # Optional line-outs of background and threshold at the max bin.
            self.fft_bg = self.env_ax.plot(pen=et.utils.pg_pen_cycler(2, "--"), name="BG @ max")
            self.fft_thresh = self.env_ax.plot(
                pen=et.utils.pg_pen_cycler(3, "--"), name="Threshold @ max"
            )
        self.peak_dist_text = pg.TextItem(color="k", anchor=(0, 1))
        self.env_ax.addItem(self.peak_dist_text)
        self.peak_dist_text.setPos(self.sensor_config.range_start * 100, 0)
        self.peak_dist_text.setZValue(3)
        # Vertical marker at the (smoothed) envelope peak position.
        self.env_peak_vline = pg.InfiniteLine(
            pos=0, angle=90, pen=pg.mkPen(width=2, style=QtCore.Qt.DotLine)
        )
        self.env_ax.addItem(self.env_peak_vline)
        row_idx += 1
        # --- Obstacle (range-Doppler) map ---
        self.obstacle_ax = win.addPlot(
            row=row_idx, col=0, colspan=self.num_hist_plots, title="Obstacle map"
        )
        self.obstacle_ax.setMenuEnabled(False)
        self.obstacle_ax.setMouseEnabled(x=False, y=False)
        self.obstacle_ax.hideButtons()
        self.obstacle_im = pg.ImageItem()
        self.obstacle_ax.setLabel("bottom", "Velocity (cm/s)")
        self.obstacle_ax.setLabel("left", "Distance (cm)")
        self.obstacle_im.setLookupTable(et.utils.pg_mpl_cmap("viridis"))
        self.obstacle_ax.addItem(self.obstacle_im)
        self.obstacle_ax.setXRange(-self.max_velocity, self.max_velocity)
        self.obstacle_ax.setYRange(*self.sensor_config.range_interval * 100)
        # NOTE(review): the two range calls below repeat the two above --
        # harmless duplication.
        self.obstacle_ax.setXRange(-self.max_velocity, self.max_velocity)
        self.obstacle_ax.setYRange(*self.sensor_config.range_interval * 100)
        self.obstacle_peak = pg.ScatterPlotItem(brush=pg.mkBrush("k"), size=15)
        self.obstacle_ax.addItem(self.obstacle_peak)
        self.peak_fft_text = pg.TextItem(color="w", anchor=(0, 1))
        self.obstacle_ax.addItem(self.peak_fft_text)
        self.peak_fft_text.setPos(-self.max_velocity, self.sensor_config.range_start * 100)
        self.peak_val_text = pg.TextItem(color="w", anchor=(0, 0))
        self.obstacle_ax.addItem(self.peak_val_text)
        self.peak_val_text.setPos(-self.max_velocity, self.sensor_config.range_end * 100)
        self.bg_estimation_text = pg.TextItem(color="w", anchor=(0, 1))
        self.obstacle_ax.addItem(self.bg_estimation_text)
        self.bg_estimation_text.setPos(-self.max_velocity, self.sensor_config.range_start * 100)
        row_idx += 1
        # --- Optional advanced plots ---
        if self.advanced_plots["background_map"]:
            self.obstacle_bg_ax = win.addPlot(
                row=row_idx, col=0, colspan=self.num_hist_plots, title="Obstacle background"
            )
            self.obstacle_bg_ax.setMenuEnabled(False)
            self.obstacle_bg_ax.setMouseEnabled(x=False, y=False)
            self.obstacle_bg_ax.hideButtons()
            self.obstacle_bg_im = pg.ImageItem()
            self.obstacle_bg_ax.setLabel("bottom", "Velocity (cm/s)")
            self.obstacle_bg_ax.setLabel("left", "Distance (cm)")
            self.obstacle_bg_im.setLookupTable(et.utils.pg_mpl_cmap("viridis"))
            self.obstacle_bg_ax.addItem(self.obstacle_bg_im)
            row_idx += 1
        if self.advanced_plots["threshold_map"]:
            self.obstacle_thresh_ax = win.addPlot(
                row=row_idx, col=0, colspan=self.num_hist_plots, title="Obstacle threshold"
            )
            self.obstacle_thresh_ax.setMenuEnabled(False)
            self.obstacle_thresh_ax.setMouseEnabled(x=False, y=False)
            self.obstacle_thresh_ax.hideButtons()
            self.obstacle_thresh_im = pg.ImageItem()
            self.obstacle_thresh_ax.setLabel("bottom", "Velocity (cm/s)")
            self.obstacle_thresh_ax.setLabel("left", "Distance (cm)")
            self.obstacle_thresh_im.setLookupTable(et.utils.pg_mpl_cmap("viridis"))
            self.obstacle_thresh_ax.addItem(self.obstacle_thresh_im)
            row_idx += 1
        # --- History plots, one column per enabled kind ---
        hist_col = 0
        row_idx += self.num_hist_plots
        if self.hist_plots["distance"][1]:
            self.peak_hist_ax_l = win.addPlot(row=row_idx, col=hist_col, title="Distance history")
            self.peak_hist_ax_l.setMenuEnabled(False)
            self.peak_hist_ax_l.setMouseEnabled(x=False, y=False)
            self.peak_hist_ax_l.hideButtons()
            self.peak_hist_ax_l.setLabel("bottom", "Sweep")
            self.peak_hist_ax_l.setXRange(0, self.peak_hist_len)
            self.peak_hist_ax_l.showGrid(True, True)
            self.peak_hist_ax_l.addLegend(offset=(-10, 10))
            self.peak_hist_ax_l.setYRange(
                self.sensor_config.range_start * 100, self.sensor_config.range_end * 100
            )
            hist_col += 1
        if self.hist_plots["velocity"][1]:
            self.peak_hist_ax_c = win.addPlot(row=row_idx, col=hist_col, title="Velocity history")
            self.peak_hist_ax_c.setMenuEnabled(False)
            self.peak_hist_ax_c.setMouseEnabled(x=False, y=False)
            self.peak_hist_ax_c.hideButtons()
            self.peak_hist_ax_c.setLabel("bottom", "Sweep")
            self.peak_hist_ax_c.setXRange(0, self.peak_hist_len)
            # Round the velocity axis limit to a multiple of 10 cm/s,
            # falling back to the raw limit when it would round to < 1.
            limit = np.round(self.max_velocity / 10) * 10
            if limit < 1.0:
                limit = self.max_velocity
            self.peak_hist_ax_c.setYRange(-limit, limit)
            self.peak_hist_ax_c.showGrid(True, True)
            self.peak_hist_ax_c.addLegend(offset=(-10, 10))
            hist_col += 1
        if self.hist_plots["angle"][1]:
            self.peak_hist_ax_r = win.addPlot(row=row_idx, col=hist_col, title="Angle history")
            self.peak_hist_ax_r.setMenuEnabled(False)
            self.peak_hist_ax_r.setMouseEnabled(x=False, y=False)
            self.peak_hist_ax_r.hideButtons()
            self.peak_hist_ax_r.setLabel("bottom", "Sweep")
            self.peak_hist_ax_r.setXRange(0, self.peak_hist_len)
            self.peak_hist_ax_r.showGrid(True, True)
            self.peak_hist_ax_r.addLegend(offset=(-10, 10))
            self.peak_hist_ax_r.setYRange(-100, 100)
            hist_col += 1
        if self.hist_plots["amplitude"][1]:
            self.peak_hist_ax_r1 = win.addPlot(
                row=row_idx, col=hist_col, title="Amplitude history"
            )
            self.peak_hist_ax_r1.setMenuEnabled(False)
            self.peak_hist_ax_r1.setMouseEnabled(x=False, y=False)
            self.peak_hist_ax_r1.hideButtons()
            self.peak_hist_ax_r1.setLabel("bottom", "Sweep")
            self.peak_hist_ax_r1.setXRange(0, self.peak_hist_len)
            self.peak_hist_ax_r1.showGrid(True, True)
            self.peak_hist_ax_r1.addLegend(offset=(-10, 10))
            hist_col += 1
        # One curve per tracked local peak in each enabled history plot.
        for i in range(self.nr_locals):
            if self.hist_plots["velocity"][1]:
                self.hist_plots["velocity"][0].append(
                    self.peak_hist_ax_c.plot(
                        pen=et.utils.pg_pen_cycler(i), name="Veloctiy {:d}".format(i)
                    )
                )
            if self.hist_plots["angle"][1]:
                self.hist_plots["angle"][0].append(
                    self.peak_hist_ax_r.plot(
                        pen=et.utils.pg_pen_cycler(i), name="Angle {:d}".format(i)
                    )
                )
            if self.hist_plots["distance"][1]:
                self.hist_plots["distance"][0].append(
                    self.peak_hist_ax_l.plot(
                        pen=et.utils.pg_pen_cycler(i), name="Distance {:d}".format(i)
                    )
                )
            if self.hist_plots["amplitude"][1]:
                self.hist_plots["amplitude"][0].append(
                    self.peak_hist_ax_r1.plot(
                        pen=et.utils.pg_pen_cycler(i), name="Amplitude {:d}".format(i)
                    )
                )
        self.smooth_max = et.utils.SmoothMax(
            self.sensor_config.update_rate,
            tau_decay=0.2,
            tau_grow=0.2,
        )
        self.plot_index = 0

    def update(self, data):
        """Refresh all plots from one frame of processed data."""
        nfft = data["fft_map"].shape[2]
        if self.plot_index == 0:
            # First frame: derive the depth axis and image transforms.
            pos0 = self.sensor_config.range_start * 100
            num_points = data["env_ampl"].size
            self.env_xs = np.linspace(*self.sensor_config.range_interval * 100, num_points)
            self.peak_x = self.env_xs[data["peak_idx"]]
            tr = QtGui.QTransform()
            tr.translate(-self.max_velocity, pos0)
            tr.scale(
                2 * self.max_velocity / nfft,
                self.sensor_config.range_length * 100 / num_points,
            )
            self.obstacle_im.setTransform(tr)
            # NOTE(review): ``tr`` has already been translated/scaled above;
            # the calls below compound onto the same transform instead of
            # rebuilding it, so the bg/thresh images get a different
            # (accumulated) transform. A fresh QTransform per image may have
            # been intended -- confirm.
            if self.advanced_plots["background_map"]:
                tr.translate(-self.max_velocity, pos0)
                tr.scale(
                    2 * self.max_velocity / nfft,
                    self.sensor_config.range_length * 100 / num_points,
                )
                self.obstacle_bg_im.setTransform(tr)
            if self.advanced_plots["threshold_map"]:
                tr.translate(-self.max_velocity, pos0)
                tr.scale(
                    2 * self.max_velocity / nfft,
                    self.sensor_config.range_length * 100 / num_points,
                )
                self.obstacle_thresh_im.setTransform(tr)
        else:
            # Exponential smoothing of the displayed peak position.
            self.peak_x = self.peak_x * 0.7 + 0.3 * self.env_xs[data["peak_idx"]]
        peak_dist_text = "Peak: {:.1f} cm".format(self.peak_x)
        peak_fft_text = "No peaks found"
        if data["fft_peaks"] is not None:
            dist = self.env_xs[data["fft_peaks"][:, 0].astype(int)]
            # Map FFT bin index to signed velocity in cm/s.
            vel = (data["fft_peaks"][:, 1] / data["fft_map"].shape[2] * 2 - 1) * self.max_velocity
            peak_fft_text = "Dist: {:.1f}cm, Speed/Angle: {:.1f}cm/s / {:.0f}".format(
                dist[0], data["velocity"], data["angle"]
            )
            # Center markers on the FFT pixel.
            half_pixel = self.max_velocity / np.floor(data["fft_map"].shape[2] / 2) / 2
            self.obstacle_peak.setData(vel + half_pixel, dist)
        else:
            self.obstacle_peak.setData([], [])
        if data["fft_bg_iterations_left"]:
            bg_text = "Stay clear of sensors, estimating background! {} iterations left"
            bg_text = bg_text.format(data["fft_bg_iterations_left"])
            peak_fft_text = ""
        else:
            bg_text = ""
        # Update per-peak history curves for each enabled history kind.
        for i in range(self.nr_locals):
            if self.hist_plots["distance"][1]:
                self.hist_plots["distance"][0][i].setData(
                    np.arange(len(data["peak_hist"][i, 0, :])), data["peak_hist"][i, 0, :]
                )
            if self.hist_plots["velocity"][1]:
                self.hist_plots["velocity"][0][i].setData(
                    np.arange(len(data["peak_hist"][i, 1, :])), data["peak_hist"][i, 1, :]
                )
            if self.hist_plots["angle"][1]:
                self.hist_plots["angle"][0][i].setData(
                    np.arange(len(data["peak_hist"][i, 2, :])), data["peak_hist"][i, 2, :]
                )
            if self.hist_plots["amplitude"][1]:
                self.hist_plots["amplitude"][0][i].setData(
                    np.arange(len(data["peak_hist"][i, 3, :])), data["peak_hist"][i, 3, :]
                )
        map_max = np.max(np.max(data["fft_map"][0]))
        self.peak_dist_text.setText(peak_dist_text)
        self.peak_fft_text.setText(peak_fft_text)
        self.bg_estimation_text.setText(bg_text)
        self.env_ampl.setData(self.env_xs, data["env_ampl"])
        self.env_peak_vline.setValue(self.peak_x)
        fft_max = np.max(data["fft_max_env"])
        env_max = np.max(data["env_ampl"])
        env_max = max(env_max, fft_max)
        self.fft_max.setData(self.env_xs, data["fft_max_env"])
        if data["fft_bg"] is not None:
            # Latch the background map for use on later frames.
            self.fft_bg_data = data["fft_bg"]
        if self.advanced_plots["show_line_outs"]:
            # Default FFT bin used when no peak information is available;
            # presumably an arbitrary fallback -- TODO confirm why 8.
            max_index = 8
            max_bg = None
            if data["fft_peaks"] is not None:
                max_index = int(data["fft_peaks"][0, 1])
            else:
                try:
                    max_index = np.asarray(
                        unravel_index(np.argmax(data["fft_map"][0]), data["fft_map"][0].shape)
                    )[1]
                except Exception:
                    pass
            if self.fft_bg_data is not None:
                max_bg = self.fft_bg_data[:, max_index]
                self.fft_bg.setData(self.env_xs, max_bg)
                env_max = max(np.max(max_bg), env_max)
            if data["threshold_map"] is not None:
                self.threshold_data = data["threshold_map"]
            if self.threshold_data is not None:
                thresh_max = self.threshold_data[:, max_index]
                if max_bg is not None:
                    thresh_max = thresh_max + max_bg
                env_max = max(np.max(thresh_max), env_max)
                self.fft_thresh.setData(self.env_xs, thresh_max)
        self.env_ax.setYRange(0, self.smooth_max.update(env_max))
        fft_data = data["fft_map"][0].T
        if self.fft_bg_data is not None:
            # Subtract the latched background before display (clamped at 0).
            max_wo_bg = map_max
            fft_data = fft_data - self.fft_bg_data.T
            fft_data[fft_data < 0] = 0
            map_max = np.max(fft_data)
            self.peak_val_text.setText("FFT max: {:.3f} ({:.3f})".format(map_max, max_wo_bg))
        else:
            self.peak_val_text.setText("FFT max: {:.3f}".format(map_max))
        # Gamma-compress (g = 1/2.2) into the 0..254 display range.
        g = 1 / 2.2
        fft_data = 254 / (map_max + 1.0e-9) ** g * fft_data**g
        fft_data[fft_data > 254] = 254
        map_min = -1
        map_max = 257
        self.obstacle_im.updateImage(fft_data, levels=(map_min, map_max))
        if data["threshold_map"] is not None and self.advanced_plots["threshold_map"]:
            thresh_max = np.max(data["threshold_map"])
            levels = (0, thresh_max * 1.05)
            self.obstacle_thresh_im.updateImage(data["threshold_map"].T, levels=levels)
        if data["fft_bg"] is not None and self.advanced_plots["background_map"]:
            map_max = np.max(np.max(data["fft_bg"]))
            fft_data = data["fft_bg"].T
            fft_data = 254 / (map_max + 1e-6) ** g * fft_data**g
            fft_data[fft_data > 254] = 254
            map_min = -1
            map_max = 257
            self.obstacle_bg_im.updateImage(fft_data, levels=(map_min, map_max))
        self.plot_index += 1
|
PypiClean
|
/Camper_Logger_Exception-0.2.0-py3-none-any.whl/Camper/Log.py
|
import logging
import colorlog
from .Static import *
from Camper.ExceptionCatcher import CamperException
def error_handler(err):
    """Default error callback: echo the caught exception to stdout."""
    print(f"{err}")
class CamperLogger:
    """Coloured console logger with optional per-message file recording.

    When ``record`` is truthy, each warning/error/info call temporarily
    attaches a FileHandler writing to ``<log_path>/<file_name>.log`` so
    the message is persisted as well as printed.
    """

    @CamperException.exception_catcher(error_callback=error_handler)
    def __init__(self, logger_name, **kwargs):
        """Create a logger.

        :param logger_name: Name passed to ``logging.getLogger``.
        :param kwargs: Optional ``debug`` (bool), ``log_path`` (directory
            string) and ``record`` (bool) settings.
        """
        self.name = logger_name
        self.debug = kwargs.get('debug')
        self.log_path = kwargs.get('log_path')
        self.record = kwargs.get('record')
        self.logger = None
        self.__init_logger()

    @CamperException.exception_catcher(error_callback=error_handler)
    def __init_logger(self):
        """Configure the underlying logger and normalise ``log_path``."""
        colorlog.basicConfig(format=colorlog_format)
        self.logger = logging.getLogger(self.name)
        self.logger.setLevel(logging.DEBUG if self.debug else logging.INFO)
        # Normalise log_path to a directory string that ends with '/'.
        if self.log_path is None or self.log_path == '' or not isinstance(self.log_path, str):
            self.log_path = './'
        elif not self.log_path.endswith('/'):
            self.log_path += '/'

    def _emit(self, level, method_name, message, file_name, default_name):
        """Shared implementation for warning/error/info (deduplicated).

        :param level: Level assigned to the temporary FileHandler.
        :param method_name: Name of the ``logging.Logger`` method to call.
        :param message: Message to log; ``None`` is silently ignored.
        :param file_name: Base name of the log file (dots are stripped).
        :param default_name: Fallback used when ``file_name`` is empty.
        """
        if self.logger is None or message is None:
            return
        emit = getattr(self.logger, method_name)
        if self.record:
            file_name = default_name if file_name is None or file_name == "" else file_name
            file_name = file_name.replace(".", "")
            fh = logging.FileHandler(self.log_path + (file_name + '.log'))
            fh.setLevel(level)
            fh.setFormatter(logging.Formatter(log_format))
            self.logger.addHandler(fh)
            emit(message)
            # Detach and close so the handler (and its file descriptor)
            # does not leak into later calls.
            self.logger.removeHandler(fh)
            fh.close()
        else:
            emit(message)

    @CamperException.exception_catcher(error_callback=error_handler)
    def warning(self, message, file_name="app-warning"):
        """Log ``message`` at WARNING level (optionally recording to file)."""
        self._emit(logging.WARNING, 'warning', message, file_name, "app-warning")

    @CamperException.exception_catcher(error_callback=error_handler)
    def error(self, message, file_name="app-error"):
        """Log ``message`` at ERROR level (optionally recording to file)."""
        self._emit(logging.ERROR, 'error', message, file_name, "app-error")

    @CamperException.exception_catcher(error_callback=error_handler)
    def info(self, message, file_name="app-info"):
        """Log ``message`` at INFO level (optionally recording to file)."""
        self._emit(logging.INFO, 'info', message, file_name, "app-info")
|
PypiClean
|
/SPyOD-0.1.7.tar.gz/SPyOD-0.1.7/README.md
|
# Spectral Proper Orthogonal Decomposition
## Table of contents
* [Description](#description)
* [Installation](#installation)
* [License](#license)
* [Contact us](#contact-us)
* [Contributors](#contributors)
## Description
__SPyOD__ is the python implementation of the Spectral Proper Orthogonal Decomposition published by [Sieber et al. in 2016](https://www.cambridge.org/core/journals/journal-of-fluid-mechanics/article/spectral-proper-orthogonal-decomposition/DCD8A6EDEFD56F5A9715DBAD38BD461A).
It includes two __.py-files__:
- `spod.py` - Includes the function `spod` which calculates the SPOD and
- `findpairs.py` - Includes the post-processing of the SPOD in the function `findpairs` which finds linked modes as described in [Sieber et al. in 2016](https://www.cambridge.org/core/journals/journal-of-fluid-mechanics/article/spectral-proper-orthogonal-decomposition/DCD8A6EDEFD56F5A9715DBAD38BD461A)
and one __jupyter notebook__ example `example_SPOD_free_jet.ipynb` of the SPOD of experimental PIV data from a free jet flow. The data are stored in `PIV_jext_example.mat`.
The paper describing the SPOD method is made publicly available by the TU Berlin at the following link: [https://doi.org/10.14279/depositonce-6377](https://doi.org/10.14279/depositonce-6377).
## Installation
The __SPyOD__ package can be installed using the following command:
```bash
$ pip install SPyOD
```
The package can be imported by
```bash
$ from spyod.spod import spod
$ from spyod.findpairs import findpairs
```
## License
__SPyOD__ is released under the MIT License. Please have a look at [LICENSE.md](LICENSE.md) for more details.
## Contact us
The best way to contribute is to report any issues you notice, suggest improvements or extensions of the code or documentation, and help fix bugs. Feel free to contact us.
## Contributors
<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->
<table>
<tbody>
<tr>
<td align="center"><a href="https://github.com/grigorishat"><img src="https://avatars.githubusercontent.com/u/114856563?s=400&u=9eea6aaba80fe841c18c8a621111e2d9f3da63ed&v=4" width="100px;" alt="Grigorios Hatzissawidis"/><br /><sub><b>Grigorios Hatzissawidis</b></sub></a></td>
<td align="center"><a href="https://github.com/morsieber"><img src="https://avatars.githubusercontent.com/u/116639701?v=4" width="100px;" alt="Moritz Sieber"/><br /><sub><b>Moritz Sieber</b></sub></a></td>
</tr>
</tbody>
</table>
<!-- markdownlint-restore -->
<!-- prettier-ignore-end -->
<!-- ALL-CONTRIBUTORS-LIST:END -->
|
PypiClean
|
/HolmesIV-2021.9.8a1.tar.gz/HolmesIV-2021.9.8a1/mycroft/skills/intent_services/padatious_service.py
|
"""Intent service wrapping padatious."""
from functools import lru_cache
from subprocess import call
from threading import Event
from time import time as get_time, sleep
from os.path import expanduser, isfile
from mycroft.configuration import Configuration
from mycroft.messagebus.message import Message
from mycroft.util.log import LOG
from mycroft.skills.intent_services.base import IntentMatch
from padaos import IntentContainer as PadaosIntentContainer
class PadatiousService:
    """Service class for padatious intent matching.

    Wraps the optional, example-based padatious intent parser. When
    padatious is not installed, or when the configuration requests it
    explicitly via "padaos_only", the service falls back to padaos, a
    pure-regex matcher exposing a compatible container interface.
    """
    def __init__(self, bus, config):
        """Initialize the intent container and attach messagebus handlers.

        Args:
            bus: Mycroft messagebus connection used to receive register/
                detach events and to emit training notifications.
            config (dict): the padatious configuration section; must
                contain 'intent_cache' and 'train_delay', may contain
                'padaos_only'.
        """
        self.padatious_config = config
        self.bus = bus
        intent_cache = expanduser(self.padatious_config['intent_cache'])
        self._padaos = self.padatious_config.get("padaos_only", False)
        try:
            if not self._padaos:
                from padatious import IntentContainer
                self.container = IntentContainer(intent_cache)
        except ImportError:
            LOG.error('Padatious not installed. Falling back to Padaos, pure regex alternative')
            try:
                # Best-effort desktop notification; silently skipped when
                # notify-send is unavailable on this system.
                call(['notify-send', 'Padatious not installed',
                      'Falling back to Padaos, pure regex alternative'])
            except OSError:
                pass
            self._padaos = True
        if self._padaos:
            LOG.warning('using padaos instead of padatious. Some intents may '
                        'be hard to trigger')
            self.container = PadaosIntentContainer()
        self.bus.on('padatious:register_intent', self.register_intent)
        self.bus.on('padatious:register_entity', self.register_entity)
        self.bus.on('detach_intent', self.handle_detach_intent)
        self.bus.on('detach_skill', self.handle_detach_skill)
        self.bus.on('mycroft.skills.initialized', self.train)
        self.finished_training_event = Event()
        self.finished_initial_train = False
        # Training is debounced: every registration pushes train_time
        # forward by train_delay seconds so a burst of registrations
        # triggers a single training run.
        self.train_delay = self.padatious_config['train_delay']
        self.train_time = get_time() + self.train_delay
        self.registered_intents = []
        self.registered_entities = []
    def train(self, message=None):
        """Perform padatious training.

        Padaos needs no training, so the padatious-specific work is
        skipped in that mode; the training event and the initial
        'mycroft.skills.trained' notification are still emitted.

        Args:
            message (Message): optional triggering message
        """
        self.finished_training_event.clear()
        if not self._padaos:
            padatious_single_thread = Configuration.get()[
                'padatious']['single_thread']
            if message is None:
                single_thread = padatious_single_thread
            else:
                # A triggering message may override the configured value.
                single_thread = message.data.get('single_thread',
                                                 padatious_single_thread)
            LOG.info('Training... (single_thread={})'.format(single_thread))
            self.container.train(single_thread=single_thread)
            LOG.info('Training complete.')
        self.finished_training_event.set()
        if not self.finished_initial_train:
            self.bus.emit(Message('mycroft.skills.trained'))
            self.finished_initial_train = True
    def wait_and_train(self):
        """Wait for minimum time between training and start training."""
        if not self.finished_initial_train:
            # Initial training is driven by 'mycroft.skills.initialized'.
            return
        sleep(self.train_delay)
        if self.train_time < 0.0:
            # Another waiter already claimed this training round.
            return
        if self.train_time <= get_time() + 0.01:
            self.train_time = -1.0
            self.train()
    def __detach_intent(self, intent_name):
        """ Remove an intent if it has been registered.

        Args:
            intent_name (str): intent identifier
        """
        if intent_name in self.registered_intents:
            self.registered_intents.remove(intent_name)
            self.container.remove_intent(intent_name)
    def handle_detach_intent(self, message):
        """Messagebus handler for detaching padatious intent.

        Args:
            message (Message): message triggering action
        """
        self.__detach_intent(message.data.get('intent_name'))
    def handle_detach_skill(self, message):
        """Messagebus handler for detaching all intents for skill.

        Args:
            message (Message): message triggering action
        """
        skill_id = message.data['skill_id']
        # Intent names embed the skill id ("<skill_id>:<intent>"), so a
        # substring check selects all of the skill's intents.
        remove_list = [i for i in self.registered_intents if skill_id in i]
        for i in remove_list:
            self.__detach_intent(i)
    def _register_object(self, message, object_name, register_func):
        """Generic method for registering a padatious object.

        Args:
            message (Message): trigger for action
            object_name (str): type of entry to register
            register_func (callable): function to call for registration
        """
        file_name = message.data['file_name']
        name = message.data['name']
        LOG.debug('Registering Padatious ' + object_name + ': ' + name)
        if not isfile(file_name):
            LOG.warning('Could not find file ' + file_name)
            return
        if self._padaos:
            # padaos does not accept a file path like padatious
            with open(file_name) as f:
                samples = [l.strip() for l in f.readlines()]
            register_func(name, samples)
        else:
            register_func(name, file_name)
        # Push back the scheduled training time and (re)enter the wait.
        self.train_time = get_time() + self.train_delay
        self.wait_and_train()
    def register_intent(self, message):
        """Messagebus handler for registering intents.

        Args:
            message (Message): message triggering action
        """
        self.registered_intents.append(message.data['name'])
        if self._padaos:
            self._register_object(message, 'intent', self.container.add_intent)
        else:
            self._register_object(message, 'intent', self.container.load_intent)
    def register_entity(self, message):
        """Messagebus handler for registering entities.

        Args:
            message (Message): message triggering action
        """
        self.registered_entities.append(message.data)
        if self._padaos:
            # BUGFIX: this branch previously logged the registration as
            # an 'intent'; it registers an entity.
            self._register_object(message, 'entity', self.container.add_entity)
        else:
            self._register_object(message, 'entity', self.container.load_entity)
    def _match_level(self, utterances, limit):
        """Match intent and make sure a certain level of confidence is reached.

        Args:
            utterances (list of tuples): Utterances to parse, originals paired
                                         with optional normalized version.
            limit (float): required confidence level.
        """
        padatious_intent = None
        LOG.debug('Padatious Matching confidence > {}'.format(limit))
        for utt in utterances:
            for variant in utt:
                intent = self.calc_intent(variant)
                if self._padaos:
                    if not intent.get("name"):
                        continue
                    # exact matches only
                    return IntentMatch(
                        'Padaos',
                        intent["name"],
                        intent["entities"],
                        intent["name"].split(':')[0]
                    )
                if intent:
                    # Keep the highest-confidence padatious match seen so far.
                    best = padatious_intent.conf if padatious_intent else 0.0
                    if best < intent.conf:
                        padatious_intent = intent
                        # Remember which raw utterance produced the match.
                        padatious_intent.matches['utterance'] = utt[0]
        if padatious_intent and padatious_intent.conf > limit:
            skill_id = padatious_intent.name.split(':')[0]
            ret = IntentMatch(
                'Padatious', padatious_intent.name, padatious_intent.matches,
                skill_id
            )
        else:
            ret = None
        return ret
    def match_high(self, utterances, _=None, __=None):
        """Intent matcher for high confidence.

        Args:
            utterances (list of tuples): Utterances to parse, originals paired
                                         with optional normalized version.
        """
        return self._match_level(utterances, 0.95)
    def match_medium(self, utterances, _=None, __=None):
        """Intent matcher for medium confidence.

        Args:
            utterances (list of tuples): Utterances to parse, originals paired
                                         with optional normalized version.
        """
        return self._match_level(utterances, 0.8)
    def match_low(self, utterances, _=None, __=None):
        """Intent matcher for low confidence.

        Args:
            utterances (list of tuples): Utterances to parse, originals paired
                                         with optional normalized version.
        """
        return self._match_level(utterances, 0.5)
    @lru_cache(maxsize=2)  # 2 catches both raw and normalized utts in cache
    def calc_intent(self, utt):
        """Cached version of container calc_intent.

        This improves speed when called multiple times for different confidence
        levels.

        NOTE: This cache will keep a reference to this class
        (PadatiousService), but we can live with that since it is used as a
        singleton.

        Args:
            utt (str): utterance to calculate best intent for
        """
        return self.container.calc_intent(utt)
|
PypiClean
|
/invoicing_as_pdf-1.0.0-py3-none-any.whl/invoicing/invoice.py
|
import os
import pandas as pd
import glob
from fpdf import FPDF
from pathlib import Path
def generate(invoices_path, pdfs_path, image_path, product_id, product_name,
             amount_purchased, price_per_unit, total_price):
    '''
    This Function converts invoice Excel files into PDF invoices.

    Each ``<invoice_nr>-<date>.xlsx`` file in ``invoices_path`` produces one
    ``<invoice_nr>-<date>.pdf`` file in ``pdfs_path`` containing a table of
    the invoice rows, the grand total, the company name and a logo image.

    :param invoices_path: directory containing the ``*.xlsx`` invoice files
    :param pdfs_path: output directory for the generated PDFs (created if missing)
    :param image_path: path of the logo image embedded in each PDF
    :param product_id: column name for the product id
    :param product_name: column name for the product name
    :param amount_purchased: column name for the purchased amount
    :param price_per_unit: column name for the unit price
    :param total_price: column name for the row total (also summed for the grand total)
    :return: None
    '''
    filepaths = glob.glob(f"{invoices_path}/*.xlsx")
    # Create the PDF directory if it doesn't exist
    if not os.path.exists(pdfs_path):
        os.makedirs(pdfs_path)
    for filepath in filepaths:
        df = pd.read_excel(filepath, sheet_name="Sheet 1")
        pdf = FPDF(orientation="P", unit="mm", format="A4")
        pdf.add_page()
        # Filenames follow the "<invoice_nr>-<date>" convention.
        filename = Path(filepath).stem
        invoice_nr, date = filename.split("-")
        pdf.set_font(family="Times", size=16, style="B")
        pdf.cell(w=50, h=8, txt=f"Invoice nr.{invoice_nr}", ln=1)
        pdf.set_font(family="Times", size=16, style="B")
        pdf.cell(w=50, h=8, txt=f"Date:.{date}", ln=1)
        # ADD A HEADER: column labels derived from the dataframe columns.
        columns = list(df.columns)
        columns = [item.replace("_", " ").title() for item in columns]
        pdf.set_font(family="Times", size=10, style="B")
        pdf.set_text_color(80, 80, 80)
        pdf.cell(w=30, h=8, txt=columns[0], border=1)
        pdf.cell(w=70, h=8, txt=columns[1], border=1)
        pdf.cell(w=30, h=8, txt=columns[2], border=1)
        pdf.cell(w=70, h=8, txt=columns[3], border=1)
        pdf.cell(w=70, h=8, txt=columns[4], border=1, ln=1)
        # One table row per invoice line.
        for index, row in df.iterrows():
            pdf.set_font(family="Times", size=10)
            pdf.set_text_color(80, 80, 80)
            pdf.cell(w=30, h=8, txt=str(row[product_id]), border=1)
            pdf.cell(w=70, h=8, txt=str(row[product_name]), border=1)
            pdf.cell(w=30, h=8, txt=str(row[amount_purchased]), border=1)
            pdf.cell(w=70, h=8, txt=str(row[price_per_unit]), border=1)
            pdf.cell(w=70, h=8, txt=str(row[total_price]), border=1, ln=1)
        # Closing row: empty cells with the grand total in the last column.
        total_sum = df[total_price].sum()
        pdf.set_font(family="Times", size=10)
        pdf.set_text_color(80, 80, 80)
        pdf.cell(w=30, h=8, txt="", border=1)
        pdf.cell(w=70, h=8, txt="", border=1)
        pdf.cell(w=30, h=8, txt="", border=1)
        pdf.cell(w=70, h=8, txt="", border=1)
        pdf.cell(w=70, h=8, txt=str(total_sum), border=1, ln=1)
        # Add total sum sentence
        pdf.set_font(family="Times", size=10, style="B")
        pdf.cell(w=30, h=8, txt=f"total_price is {total_sum}", ln=1)
        # ADD company name and logo
        pdf.set_font(family="Times", size=14, style="B")
        pdf.cell(w=35, h=8, txt=f"GOKUL_TEJA")
        pdf.image(image_path, w=10)
        content = '''hi everyone how are you all i hope that you are good'''
        pdf.multi_cell(w=30, h=15, txt=content)
        # BUGFIX: previously a hard-coded output name overwrote the same
        # PDF for every invoice; write one PDF per input file instead.
        pdf.output(f"{pdfs_path}/{filename}.pdf")
|
PypiClean
|
/safegate-pro-frontend-20210805.0.tar.gz/safegate-pro-frontend-20210805.0/hass_frontend/frontend_latest/chunk.df34f1bea54a4dedf7c4.js
|
(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[5748],{12198:(e,t,r)=>{"use strict";r.d(t,{p6:()=>a,mn:()=>d,D_:()=>u});var i=r(68928),n=r(14516),o=r(43274);const s=(0,n.Z)((e=>new Intl.DateTimeFormat(e.language,{year:"numeric",month:"long",day:"numeric"}))),a=o.Sb?(e,t)=>s(t).format(e):e=>(0,i.WU)(e,"longDate"),l=(0,n.Z)((e=>new Intl.DateTimeFormat(e.language,{day:"numeric",month:"short"}))),d=o.Sb?(e,t)=>l(t).format(e):e=>(0,i.WU)(e,"shortDate"),c=(0,n.Z)((e=>new Intl.DateTimeFormat(e.language,{weekday:"long",month:"long",day:"numeric"}))),u=o.Sb?(e,t)=>c(t).format(e):e=>(0,i.WU)(e,"dddd, MMM D")},44583:(e,t,r)=>{"use strict";r.d(t,{o:()=>l,E:()=>c});var i=r(68928),n=r(14516),o=r(43274),s=r(65810);const a=(0,n.Z)((e=>new Intl.DateTimeFormat(e.language,{year:"numeric",month:"long",day:"numeric",hour:"numeric",minute:"2-digit",hour12:(0,s.y)(e)}))),l=o.Op?(e,t)=>a(t).format(e):(e,t)=>(0,i.WU)(e,((0,s.y)(t)," A")),d=(0,n.Z)((e=>new Intl.DateTimeFormat(e.language,{year:"numeric",month:"long",day:"numeric",hour:"numeric",minute:"2-digit",second:"2-digit",hour12:(0,s.y)(e)}))),c=o.Op?(e,t)=>d(t).format(e):(e,t)=>(0,i.WU)(e,((0,s.y)(t)," A"))},49684:(e,t,r)=>{"use strict";r.d(t,{mr:()=>l,Vu:()=>c,xO:()=>h});var i=r(68928),n=r(14516),o=r(43274),s=r(65810);const a=(0,n.Z)((e=>new Intl.DateTimeFormat(e.language,{hour:"numeric",minute:"2-digit",hour12:(0,s.y)(e)}))),l=o.BF?(e,t)=>a(t).format(e):(e,t)=>(0,i.WU)(e,((0,s.y)(t)," A")),d=(0,n.Z)((e=>new Intl.DateTimeFormat(e.language,{hour:"numeric",minute:"2-digit",second:"2-digit",hour12:(0,s.y)(e)}))),c=o.BF?(e,t)=>d(t).format(e):(e,t)=>(0,i.WU)(e,((0,s.y)(t)," A")),u=(0,n.Z)((e=>new Intl.DateTimeFormat(e.language,{weekday:"long",hour:"numeric",minute:"2-digit",hour12:(0,s.y)(e)}))),h=o.BF?(e,t)=>u(t).format(e):(e,t)=>(0,i.WU)(e,((0,s.y)(t)," A"))},29171:(e,t,r)=>{"use strict";r.d(t,{D:()=>d});var i=r(56007),n=r(12198),o=r(44583),s=r(49684),a=r(45524),l=r(22311);const 
d=(e,t,r,d)=>{const c=void 0!==d?d:t.state;if(c===i.lz||c===i.nZ)return e(`state.default.${c}`);if(t.attributes.unit_of_measurement)return`${(0,a.u)(c,r)} ${t.attributes.unit_of_measurement}`;const u=(0,l.N)(t);if("input_datetime"===u){if(!d){let e;return t.attributes.has_time?t.attributes.has_date?(e=new Date(t.attributes.year,t.attributes.month-1,t.attributes.day,t.attributes.hour,t.attributes.minute),(0,o.o)(e,r)):(e=new Date,e.setHours(t.attributes.hour,t.attributes.minute),(0,s.mr)(e,r)):(e=new Date(t.attributes.year,t.attributes.month-1,t.attributes.day),(0,n.p6)(e,r))}try{const e=d.split(" ");if(2===e.length)return(0,o.o)(new Date(e.join("T")),r);if(1===e.length){if(d.includes("-"))return(0,n.p6)(new Date(`${d}T00:00`),r);if(d.includes(":")){const e=new Date;return(0,s.mr)(new Date(`${e.toISOString().split("T")[0]}T${d}`),r)}}return d}catch{return d}}return"humidifier"===u&&"on"===c&&t.attributes.humidity?`${t.attributes.humidity} %`:"counter"===u||"number"===u||"input_number"===u?(0,a.u)(c,r):t.attributes.device_class&&e(`component.${u}.state.${t.attributes.device_class}.${c}`)||e(`component.${u}.state._.${c}`)||c}},22311:(e,t,r)=>{"use strict";r.d(t,{N:()=>n});var i=r(58831);const n=e=>(0,i.M)(e.entity_id)},45524:(e,t,r)=>{"use strict";r.d(t,{u:()=>n});var i=r(66477);const n=(e,t,r)=>{let n;switch(null==t?void 0:t.number_format){case i.y4.comma_decimal:n=["en-US","en"];break;case i.y4.decimal_comma:n=["de","es","it"];break;case i.y4.space_comma:n=["fr","sv","cs"];break;case i.y4.system:n=void 0;break;default:n=null==t?void 0:t.language}if(Number.isNaN=Number.isNaN||function e(t){return"number"==typeof t&&e(t)},!Number.isNaN(Number(e))&&Intl&&(null==t?void 0:t.number_format)!==i.y4.none)try{return new Intl.NumberFormat(n,o(e,r)).format(Number(e))}catch(t){return console.error(t),new Intl.NumberFormat(void 0,o(e,r)).format(Number(e))}return e.toString()},o=(e,t)=>{const r=t||{};if("string"!=typeof e)return 
r;if(!t||!t.minimumFractionDigits&&!t.maximumFractionDigits){const t=e.indexOf(".")>-1?e.split(".")[1].length:0;r.minimumFractionDigits=t,r.maximumFractionDigits=t}return r}},26955:(e,t,r)=>{"use strict";r.r(t),r.d(t,{EntityRegistrySettingsHelper:()=>M});var i=r(50424),n=r(55358),o=r(7323),s=r(55642),a=r(47181),l=r(56005),d=r(74186),c=r(43180),u=r(11512),h=r(3300),f=r(74725),p=r(77535),m=r(8326),y=r(26765),v=r(11654),b=(r(13345),r(45122),r(65580),r(88108),r(38707),r(39509),r(48003),r(30879),r(58831)),g=(r(68101),r(43709),r(57292)),k=r(73826);function _(){_=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(i){t.forEach((function(t){var n=t.placement;if(t.kind===i&&("static"===n||"prototype"===n)){var o="static"===n?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var i=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===i?void 0:i.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],i=[],n={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,n)}),this),e.forEach((function(e){if(!D(e))return r.push(e);var t=this.decorateElement(e,n);r.push(t.element),r.push.apply(r,t.extras),i.push.apply(i,t.finishers)}),this),!t)return{elements:r,finishers:i};var o=this.decorateConstructor(r,t);return i.push.apply(i,o.finishers),o.finishers=i,o},addElementPlacement:function(e,t,r){var i=t[e.placement];if(!r&&-1!==i.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");i.push(e.key)},decorateElement:function(e,t){for(var r=[],i=[],n=e.decorators,o=n.length-1;o>=0;o--){var 
s=t[e.placement];s.splice(s.indexOf(e.key),1);var a=this.fromElementDescriptor(e),l=this.toElementFinisherExtras((0,n[o])(a)||a);e=l.element,this.addElementPlacement(e,t),l.finisher&&i.push(l.finisher);var d=l.extras;if(d){for(var c=0;c<d.length;c++)this.addElementPlacement(d[c],t);r.push.apply(r,d)}}return{element:e,finishers:i,extras:r}},decorateConstructor:function(e,t){for(var r=[],i=t.length-1;i>=0;i--){var n=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[i])(n)||n);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var s=0;s<e.length-1;s++)for(var a=s+1;a<e.length;a++)if(e[s].key===e[a].key&&e[s].placement===e[a].placement)throw new TypeError("Duplicated element ("+e[s].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return I(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?I(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new 
TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var r=S(e.key),i=String(e.placement);if("static"!==i&&"prototype"!==i&&"own"!==i)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+i+'"');var n=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:i,descriptor:Object.assign({},n)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(n,"get","The property descriptor of a field descriptor"),this.disallowProperty(n,"set","The property descriptor of a field descriptor"),this.disallowProperty(n,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:x(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=x(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var i=(0,t[r])(e);if(void 0!==i){if("function"!=typeof i)throw new TypeError("Finishers must return a 
constructor.");e=i}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function w(e){var t,r=S(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var i={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(i.decorators=e.decorators),"field"===e.kind&&(i.initializer=e.value),i}function E(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function D(e){return e.decorators&&e.decorators.length}function P(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function x(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function S(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var i=r.call(e,t||"default");if("object"!=typeof i)return i;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function I(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,i=new Array(t);r<t;r++)i[r]=e[r];return i}function O(e,t,r){return(O="undefined"!=typeof Reflect&&Reflect.get?Reflect.get:function(e,t,r){var i=function(e,t){for(;!Object.prototype.hasOwnProperty.call(e,t)&&null!==(e=A(e)););return e}(e,t);if(i){var n=Object.getOwnPropertyDescriptor(i,t);return n.get?n.get.call(r):n.value}})(e,t,r||e)}function A(e){return(A=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}!function(e,t,r,i){var n=_();if(i)for(var o=0;o<i.length;o++)n=i[o](n);var 
s=t((function(e){n.initializeInstanceElements(e,a.elements)}),r),a=n.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},i=0;i<e.length;i++){var n,o=e[i];if("method"===o.kind&&(n=t.find(r)))if(P(o.descriptor)||P(n.descriptor)){if(D(o)||D(n))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");n.descriptor=o.descriptor}else{if(D(o)){if(D(n))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");n.decorators=o.decorators}E(o,n)}else t.push(o)}return t}(s.d.map(w)),e);n.initializeClassElements(s.F,a.elements),n.runClassFinishers(s.F,a.finishers)}([(0,n.Mo)("ha-registry-basic-editor")],(function(e,t){class r extends t{constructor(...t){super(...t),e(this)}}return{F:r,d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"entry",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_origEntityId",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_entityId",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_areaId",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_disabledBy",value:void 0},{kind:"field",key:"_deviceLookup",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_device",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_submitting",value:void 0},{kind:"method",key:"updateEntry",value:async function(){this._submitting=!0;const e={new_entity_id:this._entityId.trim(),area_id:this._areaId||null};this.entry.disabled_by===this._disabledBy||null!==this._disabledBy&&"user"!==this._disabledBy||(e.disabled_by=this._disabledBy);try{const 
t=await(0,d.Nv)(this.hass,this._origEntityId,e);t.require_restart&&(0,y.Ys)(this,{text:this.hass.localize("ui.dialogs.entity_registry.editor.enabled_restart_confirm")}),t.reload_delay&&(0,y.Ys)(this,{text:this.hass.localize("ui.dialogs.entity_registry.editor.enabled_delay_confirm","delay",t.reload_delay)})}finally{this._submitting=!1}}},{kind:"method",key:"hassSubscribe",value:function(){return[(0,g.q4)(this.hass.connection,(e=>{this._deviceLookup={};for(const t of e)this._deviceLookup[t.id]=t;!this._device&&this.entry.device_id&&(this._device=this._deviceLookup[this.entry.device_id])}))]}},{kind:"method",key:"updated",value:function(e){O(A(r.prototype),"updated",this).call(this,e),e.has("entry")&&this.entry&&(this._origEntityId=this.entry.entity_id,this._entityId=this.entry.entity_id,this._disabledBy=this.entry.disabled_by,this._areaId=this.entry.area_id,this._device=this.entry.device_id&&this._deviceLookup?this._deviceLookup[this.entry.device_id]:void 0)}},{kind:"method",key:"render",value:function(){var e;if(!this.hass||!this.entry||this.entry.entity_id!==this._origEntityId)return i.dy``;const t=(0,b.M)(this._entityId.trim())!==(0,b.M)(this.entry.entity_id);return i.dy`
<paper-input
.value=${this._entityId}
@value-changed=${this._entityIdChanged}
.label=${this.hass.localize("ui.dialogs.entity_registry.editor.entity_id")}
error-message="Domain needs to stay the same"
.invalid=${t}
.disabled=${this._submitting}
></paper-input>
<ha-area-picker
.hass=${this.hass}
.value=${this._areaId}
.placeholder=${null===(e=this._device)||void 0===e?void 0:e.area_id}
@value-changed=${this._areaPicked}
></ha-area-picker>
<div class="row">
<ha-switch
.checked=${!this._disabledBy}
@change=${this._disabledByChanged}
>
</ha-switch>
<div>
<div>
${this.hass.localize("ui.dialogs.entity_registry.editor.enabled_label")}
</div>
<div class="secondary">
${this._disabledBy&&"user"!==this._disabledBy?this.hass.localize("ui.dialogs.entity_registry.editor.enabled_cause","cause",this.hass.localize(`config_entry.disabled_by.${this._disabledBy}`)):""}
${this.hass.localize("ui.dialogs.entity_registry.editor.enabled_description")}
<br />${this.hass.localize("ui.dialogs.entity_registry.editor.note")}
</div>
</div>
</div>
`}},{kind:"method",key:"_areaPicked",value:function(e){this._areaId=e.detail.value}},{kind:"method",key:"_entityIdChanged",value:function(e){this._entityId=e.detail.value}},{kind:"method",key:"_disabledByChanged",value:function(e){this._disabledBy=e.target.checked?null:"user"}},{kind:"get",static:!0,key:"styles",value:function(){return i.iv`
ha-switch {
margin-right: 16px;
}
.row {
margin-top: 8px;
color: var(--primary-text-color);
display: flex;
align-items: center;
}
.secondary {
color: var(--secondary-text-color);
}
`}}]}}),(0,k.f)(i.oi));function C(){C=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(i){t.forEach((function(t){var n=t.placement;if(t.kind===i&&("static"===n||"prototype"===n)){var o="static"===n?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var i=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===i?void 0:i.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],i=[],n={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,n)}),this),e.forEach((function(e){if(!z(e))return r.push(e);var t=this.decorateElement(e,n);r.push(t.element),r.push.apply(r,t.extras),i.push.apply(i,t.finishers)}),this),!t)return{elements:r,finishers:i};var o=this.decorateConstructor(r,t);return i.push.apply(i,o.finishers),o.finishers=i,o},addElementPlacement:function(e,t,r){var i=t[e.placement];if(!r&&-1!==i.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");i.push(e.key)},decorateElement:function(e,t){for(var r=[],i=[],n=e.decorators,o=n.length-1;o>=0;o--){var s=t[e.placement];s.splice(s.indexOf(e.key),1);var a=this.fromElementDescriptor(e),l=this.toElementFinisherExtras((0,n[o])(a)||a);e=l.element,this.addElementPlacement(e,t),l.finisher&&i.push(l.finisher);var d=l.extras;if(d){for(var c=0;c<d.length;c++)this.addElementPlacement(d[c],t);r.push.apply(r,d)}}return{element:e,finishers:i,extras:r}},decorateConstructor:function(e,t){for(var r=[],i=t.length-1;i>=0;i--){var n=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[i])(n)||n);if(void 0!==o.finisher&&r.push(o.finisher),void 
0!==o.elements){e=o.elements;for(var s=0;s<e.length-1;s++)for(var a=s+1;a<e.length;a++)if(e[s].key===e[a].key&&e[s].placement===e[a].placement)throw new TypeError("Duplicated element ("+e[s].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return N(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?N(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var r=B(e.key),i=String(e.placement);if("static"!==i&&"prototype"!==i&&"own"!==i)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+i+'"');var n=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var 
o={kind:t,key:r,placement:i,descriptor:Object.assign({},n)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(n,"get","The property descriptor of a field descriptor"),this.disallowProperty(n,"set","The property descriptor of a field descriptor"),this.disallowProperty(n,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:j(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=j(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var i=(0,t[r])(e);if(void 0!==i){if("function"!=typeof i)throw new TypeError("Finishers must return a constructor.");e=i}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function $(e){var t,r=B(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var 
i={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(i.decorators=e.decorators),"field"===e.kind&&(i.initializer=e.value),i}function T(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function z(e){return e.decorators&&e.decorators.length}function F(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function j(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function B(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var i=r.call(e,t||"default");if("object"!=typeof i)return i;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function N(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,i=new Array(t);r<t;r++)i[r]=e[r];return i}function L(e,t,r){return(L="undefined"!=typeof Reflect&&Reflect.get?Reflect.get:function(e,t,r){var i=function(e,t){for(;!Object.prototype.hasOwnProperty.call(e,t)&&null!==(e=U(e)););return e}(e,t);if(i){var n=Object.getOwnPropertyDescriptor(i,t);return n.get?n.get.call(r):n.value}})(e,t,r||e)}function U(e){return(U=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}const R={input_boolean:{fetch:c.Aj,update:c.Xr,delete:c.wO},input_text:{fetch:p.YL,update:p.jt,delete:p.KB},input_number:{fetch:h.K4,update:h.hb,delete:h.fH},input_datetime:{fetch:u.s2,update:u.FF,delete:u.Gi},input_select:{fetch:f.LN,update:f.ON,delete:f.H3},counter:{fetch:l.W2,update:l.Rm,delete:l.YL},timer:{fetch:m.aT,update:m.mZ,delete:m.WH}};let M=function(e,t,r,i){var n=C();if(i)for(var o=0;o<i.length;o++)n=i[o](n);var s=t((function(e){n.initializeInstanceElements(e,a.elements)}),r),a=n.decorateClass(function(e){for(var 
t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},i=0;i<e.length;i++){var n,o=e[i];if("method"===o.kind&&(n=t.find(r)))if(F(o.descriptor)||F(n.descriptor)){if(z(o)||z(n))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");n.descriptor=o.descriptor}else{if(z(o)){if(z(n))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");n.decorators=o.decorators}T(o,n)}else t.push(o)}return t}(s.d.map($)),e);return n.initializeClassElements(s.F,a.elements),n.runClassFinishers(s.F,a.finishers)}([(0,n.Mo)("entity-settings-helper-tab")],(function(e,t){class r extends t{constructor(...t){super(...t),e(this)}}return{F:r,d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"entry",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_error",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_item",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_submitting",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_componentLoaded",value:void 0},{kind:"field",decorators:[(0,n.IO)("ha-registry-basic-editor")],key:"_registryEditor",value:void 0},{kind:"method",key:"firstUpdated",value:function(e){L(U(r.prototype),"firstUpdated",this).call(this,e),this._componentLoaded=(0,o.p)(this.hass,this.entry.platform)}},{kind:"method",key:"updated",value:function(e){L(U(r.prototype),"updated",this).call(this,e),e.has("entry")&&(this._error=void 0,this._item=void 0,this._getItem())}},{kind:"method",key:"render",value:function(){if(void 0===this._item)return i.dy``;const e=this.hass.states[this.entry.entity_id];return i.dy`
<div class="form">
${this._error?i.dy` <div class="error">${this._error}</div> `:""}
${this._componentLoaded?null===this._item?this.hass.localize("ui.dialogs.helper_settings.yaml_not_editable"):i.dy`
<span @value-changed=${this._valueChanged}>
${(0,s.h)(`ha-${this.entry.platform}-form`,{hass:this.hass,item:this._item,entry:this.entry})}
</span>
`:this.hass.localize("ui.dialogs.helper_settings.platform_not_loaded","platform",this.entry.platform)}
<ha-registry-basic-editor
.hass=${this.hass}
.entry=${this.entry}
></ha-registry-basic-editor>
</div>
<div class="buttons">
<mwc-button
class="warning"
@click=${this._confirmDeleteItem}
.disabled=${this._submitting||!this._item&&!(null!=e&&e.attributes.restored)}
>
${this.hass.localize("ui.dialogs.entity_registry.editor.delete")}
</mwc-button>
<mwc-button
@click=${this._updateItem}
.disabled=${this._submitting||this._item&&!this._item.name}
>
${this.hass.localize("ui.dialogs.entity_registry.editor.update")}
</mwc-button>
</div>
`}},{kind:"method",key:"_valueChanged",value:function(e){this._error=void 0,this._item=e.detail.value}},{kind:"method",key:"_getItem",value:async function(){const e=await R[this.entry.platform].fetch(this.hass);this._item=e.find((e=>e.id===this.entry.unique_id))||null}},{kind:"method",key:"_updateItem",value:async function(){this._submitting=!0;try{var e;this._componentLoaded&&this._item&&await R[this.entry.platform].update(this.hass,this._item.id,this._item),await(null===(e=this._registryEditor)||void 0===e?void 0:e.updateEntry()),(0,a.B)(this,"close-dialog")}catch(e){this._error=e.message||"Unknown error"}finally{this._submitting=!1}}},{kind:"method",key:"_confirmDeleteItem",value:async function(){if(await(0,y.g7)(this,{text:this.hass.localize("ui.dialogs.entity_registry.editor.confirm_delete")})){this._submitting=!0;try{if(this._componentLoaded&&this._item)await R[this.entry.platform].delete(this.hass,this._item.id);else{const e=this.hass.states[this.entry.entity_id];if(null==e||!e.attributes.restored)return;await(0,d.z3)(this.hass,this.entry.entity_id)}(0,a.B)(this,"close-dialog")}finally{this._submitting=!1}}}},{kind:"get",static:!0,key:"styles",value:function(){return[v.Qx,i.iv`
:host {
display: block;
padding: 0 !important;
}
.form {
padding: 20px 24px;
margin-bottom: 53px;
}
.buttons {
position: absolute;
bottom: 0;
width: 100%;
box-sizing: border-box;
border-top: 1px solid
var(--mdc-dialog-scroll-divider-color, rgba(0, 0, 0, 0.12));
display: flex;
justify-content: space-between;
padding: 8px;
background-color: var(--mdc-theme-surface, #fff);
}
.error {
color: var(--error-color);
margin-bottom: 8px;
}
.row {
margin-top: 8px;
color: var(--primary-text-color);
}
.secondary {
color: var(--secondary-text-color);
}
`]}}]}}),i.oi)}}]);
//# sourceMappingURL=chunk.df34f1bea54a4dedf7c4.js.map
|
PypiClean
|
/cifrazia-django-jet-1.1.4.tar.gz/cifrazia-django-jet-1.1.4/jet/dashboard/models.py
|
import json
from importlib import import_module
from django.db import models
from django.utils.translation import gettext_lazy as _
from six import python_2_unicode_compatible
from jet.utils import LazyDateTimeEncoder
@python_2_unicode_compatible
class UserDashboardModule(models.Model):
    """Per-user instance of a dashboard widget: which module class to render,
    where it sits in the layout (column/order) and its JSON-encoded settings."""

    title = models.CharField(verbose_name=_('Title'), max_length=255)
    # Dotted import path of the module class, e.g. 'jet.dashboard.modules.LinkList'.
    module = models.CharField(verbose_name=_('module'), max_length=255)
    app_label = models.CharField(verbose_name=_('application name'), max_length=255, null=True, blank=True)
    # Stored as a plain integer (user primary key), not a ForeignKey.
    user = models.PositiveIntegerField(verbose_name=_('user'))
    column = models.PositiveIntegerField(verbose_name=_('column'))
    order = models.IntegerField(verbose_name=_('order'))
    # JSON-encoded dict; defaults to '' (empty, not '{}') — see _load_settings().
    settings = models.TextField(verbose_name=_('settings'), default='', blank=True)
    children = models.TextField(verbose_name=_('children'), default='', blank=True)
    collapsed = models.BooleanField(verbose_name=_('collapsed'), default=False)

    class Meta:
        verbose_name = _('user dashboard module')
        verbose_name_plural = _('user dashboard modules')
        ordering = ('column', 'order')

    def __str__(self):
        return self.module

    def load_module(self):
        """Import and return the class referenced by ``self.module``.

        Returns None when the path cannot be imported, the attribute is
        missing, or the path contains no dot (``rsplit`` unpack fails).
        """
        try:
            package, module_name = self.module.rsplit('.', 1)
            package = import_module(package)
            module = getattr(package, module_name)
            return module
        except (AttributeError, ImportError, ValueError):
            # ValueError: self.module has no '.' so the 2-tuple unpack fails.
            return None

    def _load_settings(self):
        # The field defaults to '' which json.loads() rejects; treat it as {}.
        return json.loads(self.settings) if self.settings else {}

    def pop_settings(self, pop_settings):
        """Remove the given keys from the stored settings and save the row."""
        settings = self._load_settings()
        for setting in pop_settings:
            # dict.pop with default avoids the membership pre-check.
            settings.pop(setting, None)
        self.settings = json.dumps(settings, cls=LazyDateTimeEncoder)
        self.save()

    def update_settings(self, update_settings):
        """Merge ``update_settings`` into the stored settings and save the row."""
        settings = self._load_settings()
        settings.update(update_settings)
        self.settings = json.dumps(settings, cls=LazyDateTimeEncoder)
        self.save()
|
PypiClean
|
/tls.messaging-0.6.4.tar.gz/tls.messaging-0.6.4/tls/messaging/oauth.py
|
import dataclasses
import json
import math
import time
from urllib import error, parse, request
from . import exceptions
from .utils import config
@dataclasses.dataclass
class TToken:
    """
    OAuth token.
    Attrs:
        access_token: The OAuth access token.
        token_type: The type of the token.
        expires_in: The number of seconds until the token expires.
        retrieved_at: The time when the token was retrieved.
        expired: Whether the token is expired.
        authorization: The value of the Authorization header with the token.
    """
    # NOTE: @dataclass keeps an explicitly defined __init__ (per the dataclasses
    # docs the generated one is skipped when the class already defines it), so
    # this constructor — not the field list below — controls construction.
    def __init__(self, access_token: str, token_type: str, expires_in: str):
        """Construct."""
        # Timestamp (whole seconds, rounded up) used as the expiry baseline.
        self.retrieved_at = math.ceil(time.time())
        self.access_token = access_token
        self.token_type = token_type
        # The API delivers expires_in as a string; normalise to int here.
        self.expires_in = int(expires_in)
    # The access token
    access_token: str
    # The type of the token
    token_type: str
    # The time to expiry
    expires_in: int
    # The time it was created
    retrieved_at: int
    @property
    def expired(self) -> bool:
        """Whether the tokens are expired."""
        # Compares "now" (rounded up) against retrieval time plus lifetime.
        return math.ceil(time.time()) >= self.retrieved_at + self.expires_in
    @property
    def authorization(self) -> str:
        """Create the value of the Authorization header."""
        return f"Bearer {self.access_token}"
# Module-level single-slot cache shared by every wrapped fetcher.
_CACHE = {"old_token": None}


def _reuse_token(func):
    """Decorate ``func`` so its token is cached and reused until it expires."""

    def inner() -> TToken:
        """Return the cached token, refreshing it via ``func`` when needed."""
        cached = _CACHE["old_token"]
        if cached is None or cached.expired:
            cached = func()
            _CACHE["old_token"] = cached
        assert cached is not None
        return cached

    return inner
def _get_token() -> TToken:
    """
    Retrieve the OAuth tokens.

    Performs a client-credentials grant against the Telstra token endpoint.

    Returns:
        The oauth tokens.

    Raises:
        exceptions.CredentialError: when the endpoint rejects the request.
    """
    url = "https://tapi.telstra.com/v2/oauth/token"
    form = {
        "grant_type": "client_credentials",
        "client_id": config.get().tls_client_key,
        "client_secret": config.get().tls_client_secret,
        "scope": "NSMS",
    }
    data = parse.urlencode(form).encode("ascii")
    try:
        with request.urlopen(url, data) as response:
            payload = json.loads(response.read().decode())
    except error.HTTPError as exc:
        raise exceptions.CredentialError(f"Could not retrieve token: {exc}") from exc
    return TToken(**payload)


# Public entry point: cached token fetcher (refreshes only when expired).
get_token = _reuse_token(_get_token)
|
PypiClean
|
/ai_flow-0.3.1.tar.gz/ai_flow-0.3.1/airflow/models/dagbag.py
|
import hashlib
import importlib
import importlib.machinery
import importlib.util
import logging
import os
import sys
import textwrap
import traceback
import warnings
import zipfile
from datetime import datetime, timedelta
from typing import Dict, List, NamedTuple, Optional
import tenacity
from croniter import CroniterBadCronError, CroniterBadDateError, CroniterNotAlphaError, croniter
from sqlalchemy.exc import OperationalError
from sqlalchemy.orm import Session
from tabulate import tabulate
from airflow import settings
from airflow.configuration import conf
from airflow.exceptions import AirflowClusterPolicyViolation, AirflowDagCycleException, SerializedDagNotFound
from airflow.stats import Stats
from airflow.utils import timezone
from airflow.utils.dag_cycle_tester import test_cycle
from airflow.utils.file import correct_maybe_zipped, list_py_file_paths, might_contain_dag
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.session import provide_session
from airflow.utils.timeout import timeout
class FileLoadStat(NamedTuple):
    """Information about single file"""
    # Path of the parsed DAG file (reported relative to the DAGs folder).
    file: str
    # Wall-clock time spent parsing the file.
    duration: timedelta
    # Number of DAGs found in the file.
    dag_num: int
    # Total number of tasks across those DAGs.
    task_num: int
    # String form of the list of dag_ids found in the file.
    dags: str
class DagBag(LoggingMixin):
    """
    A dagbag is a collection of dags, parsed out of a folder tree and has high
    level configuration settings, like what database to use as a backend and
    what executor to use to fire off tasks. This makes it easier to run
    distinct environments for say production and development, tests, or for
    different teams or security profiles. What would have been system level
    settings are now dagbag level so that one system can run multiple,
    independent settings sets.

    :param dag_folder: the folder to scan to find DAGs
    :type dag_folder: unicode
    :param include_examples: whether to include the examples that ship
        with airflow or not
    :type include_examples: bool
    :param include_smart_sensor: whether to include the smart sensor native
        DAGs that create the smart sensor operators for whole cluster
    :type include_smart_sensor: bool
    :param read_dags_from_db: Read DAGs from DB if ``True`` is passed.
        If ``False`` DAGs are read from python files.
    :type read_dags_from_db: bool
    """

    DAGBAG_IMPORT_TIMEOUT = conf.getfloat('core', 'DAGBAG_IMPORT_TIMEOUT')
    SCHEDULER_ZOMBIE_TASK_THRESHOLD = conf.getint('scheduler', 'scheduler_zombie_task_threshold')

    def __init__(
        self,
        dag_folder: Optional[str] = None,
        include_examples: bool = conf.getboolean('core', 'LOAD_EXAMPLES'),
        include_smart_sensor: bool = conf.getboolean('smart_sensor', 'USE_SMART_SENSOR'),
        safe_mode: bool = conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE'),
        read_dags_from_db: bool = False,
        store_serialized_dags: Optional[bool] = None,
    ):
        # Avoid circular import
        from airflow.models.dag import DAG

        super().__init__()
        if store_serialized_dags:
            warnings.warn(
                "The store_serialized_dags parameter has been deprecated. "
                "You should pass the read_dags_from_db parameter.",
                DeprecationWarning,
                stacklevel=2,
            )
            read_dags_from_db = store_serialized_dags
        dag_folder = dag_folder or settings.DAGS_FOLDER
        self.dag_folder = dag_folder
        self.dags: Dict[str, DAG] = {}
        # the file's last modified timestamp when we last read it
        self.file_last_changed: Dict[str, datetime] = {}
        self.import_errors: Dict[str, str] = {}
        self.has_logged = False
        self.read_dags_from_db = read_dags_from_db
        # Only used by read_dags_from_db=True
        self.dags_last_fetched: Dict[str, datetime] = {}
        # Only used by SchedulerJob to compare the dag_hash to identify change in DAGs
        self.dags_hash: Dict[str, str] = {}
        self.dagbag_import_error_tracebacks = conf.getboolean('core', 'dagbag_import_error_tracebacks')
        self.dagbag_import_error_traceback_depth = conf.getint('core', 'dagbag_import_error_traceback_depth')
        self.collect_dags(
            dag_folder=dag_folder,
            include_examples=include_examples,
            include_smart_sensor=include_smart_sensor,
            safe_mode=safe_mode,
        )

    def size(self) -> int:
        """:return: the amount of dags contained in this dagbag"""
        return len(self.dags)

    @property
    def store_serialized_dags(self) -> bool:
        """Whether or not to read dags from DB"""
        warnings.warn(
            "The store_serialized_dags property has been deprecated. Use read_dags_from_db instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.read_dags_from_db

    @property
    def dag_ids(self) -> List[str]:
        """
        :return: a list of DAG IDs in this bag
        :rtype: List[unicode]
        """
        return list(self.dags.keys())

    @provide_session
    def get_dag(self, dag_id, session: Session = None):
        """
        Gets the DAG out of the dictionary, and refreshes it if expired

        :param dag_id: DAG Id
        :type dag_id: str
        """
        # Avoid circular import
        from airflow.models.dag import DagModel

        if self.read_dags_from_db:
            # Import here so that serialized dag is only imported when serialization is enabled
            from airflow.models.serialized_dag import SerializedDagModel

            if dag_id not in self.dags:
                # Load from DB if not (yet) in the bag
                self._add_dag_from_db(dag_id=dag_id, session=session)
                return self.dags.get(dag_id)

            # If DAG is in the DagBag, check the following
            # 1. if time has come to check if DAG is updated (controlled by min_serialized_dag_fetch_secs)
            # 2. check the last_updated column in SerializedDag table to see if Serialized DAG is updated
            # 3. if (2) is yes, fetch the Serialized DAG.
            min_serialized_dag_fetch_secs = timedelta(seconds=settings.MIN_SERIALIZED_DAG_FETCH_INTERVAL)
            if (
                dag_id in self.dags_last_fetched
                and timezone.utcnow() > self.dags_last_fetched[dag_id] + min_serialized_dag_fetch_secs
            ):
                sd_last_updated_datetime = SerializedDagModel.get_last_updated_datetime(
                    dag_id=dag_id,
                    session=session,
                )
                if sd_last_updated_datetime > self.dags_last_fetched[dag_id]:
                    self._add_dag_from_db(dag_id=dag_id, session=session)
            return self.dags.get(dag_id)

        # If asking for a known subdag, we want to refresh the parent
        dag = None
        root_dag_id = dag_id
        if dag_id in self.dags:
            dag = self.dags[dag_id]
            if dag.is_subdag:
                root_dag_id = dag.parent_dag.dag_id  # type: ignore

        # If DAG Model is absent, we can't check last_expired property. Is the DAG not yet synchronized?
        orm_dag = DagModel.get_current(root_dag_id, session=session)
        if not orm_dag:
            return self.dags.get(dag_id)

        # If the dag corresponding to root_dag_id is absent or expired
        is_missing = root_dag_id not in self.dags
        is_expired = orm_dag.last_expired and dag and dag.last_loaded < orm_dag.last_expired
        if is_missing or is_expired:
            # Reprocess source file
            found_dags = self.process_file(
                filepath=correct_maybe_zipped(orm_dag.fileloc), only_if_updated=False
            )
            # If the source file no longer exports `dag_id`, delete it from self.dags
            if found_dags and dag_id in [found_dag.dag_id for found_dag in found_dags]:
                return self.dags[dag_id]
            elif dag_id in self.dags:
                del self.dags[dag_id]
        return self.dags.get(dag_id)

    def _add_dag_from_db(self, dag_id: str, session: Session):
        """Add DAG to DagBag from DB"""
        from airflow.models.serialized_dag import SerializedDagModel

        row = SerializedDagModel.get(dag_id, session)
        if not row:
            raise SerializedDagNotFound(f"DAG '{dag_id}' not found in serialized_dag table")
        dag = row.dag
        for subdag in dag.subdags:
            self.dags[subdag.dag_id] = subdag
        self.dags[dag.dag_id] = dag
        self.dags_last_fetched[dag.dag_id] = timezone.utcnow()
        self.dags_hash[dag.dag_id] = row.dag_hash

    def process_file(self, filepath, only_if_updated=True, safe_mode=True):
        """
        Given a path to a python module or zip file, this method imports
        the module and look for dag objects within it.
        """
        # if the source file no longer exists in the DB or in the filesystem,
        # return an empty list
        # todo: raise exception?
        if filepath is None or not os.path.isfile(filepath):
            return []
        try:
            # This failed before in what may have been a git sync
            # race condition
            file_last_changed_on_disk = datetime.fromtimestamp(os.path.getmtime(filepath))
            if (
                only_if_updated
                and filepath in self.file_last_changed
                and file_last_changed_on_disk == self.file_last_changed[filepath]
            ):
                return []
        except Exception as e:  # pylint: disable=broad-except
            self.log.exception(e)
            return []

        if not zipfile.is_zipfile(filepath):
            mods = self._load_modules_from_file(filepath, safe_mode)
        else:
            mods = self._load_modules_from_zip(filepath, safe_mode)

        found_dags = self._process_modules(filepath, mods, file_last_changed_on_disk)
        self.file_last_changed[filepath] = file_last_changed_on_disk
        return found_dags

    def _load_modules_from_file(self, filepath, safe_mode):
        """Import a plain .py file under a unique module name; return its modules."""
        if not might_contain_dag(filepath, safe_mode):
            # Don't want to spam user with skip messages
            if not self.has_logged:
                self.has_logged = True
                self.log.info("File %s assumed to contain no DAGs. Skipping.", filepath)
            return []

        self.log.debug("Importing %s", filepath)
        org_mod_name, _ = os.path.splitext(os.path.split(filepath)[-1])
        # Hash the path into the module name so equally-named files in
        # different folders don't collide in sys.modules.
        path_hash = hashlib.sha1(filepath.encode('utf-8')).hexdigest()
        mod_name = f'unusual_prefix_{path_hash}_{org_mod_name}'

        if mod_name in sys.modules:
            del sys.modules[mod_name]

        timeout_msg = f"DagBag import timeout for {filepath} after {self.DAGBAG_IMPORT_TIMEOUT}s"
        with timeout(self.DAGBAG_IMPORT_TIMEOUT, error_message=timeout_msg):
            try:
                loader = importlib.machinery.SourceFileLoader(mod_name, filepath)
                spec = importlib.util.spec_from_loader(mod_name, loader)
                new_module = importlib.util.module_from_spec(spec)
                sys.modules[spec.name] = new_module
                loader.exec_module(new_module)
                return [new_module]
            except Exception as e:  # pylint: disable=broad-except
                self.log.exception("Failed to import: %s", filepath)
                if self.dagbag_import_error_tracebacks:
                    self.import_errors[filepath] = traceback.format_exc(
                        limit=-self.dagbag_import_error_traceback_depth
                    )
                else:
                    self.import_errors[filepath] = str(e)
                return []

    def _load_modules_from_zip(self, filepath, safe_mode):
        """Import every top-level .py/.pyc inside a zip; return the modules."""
        mods = []
        current_zip_file = zipfile.ZipFile(filepath)
        for zip_info in current_zip_file.infolist():
            head, _ = os.path.split(zip_info.filename)
            mod_name, ext = os.path.splitext(zip_info.filename)
            if ext not in [".py", ".pyc"]:
                continue
            if head:
                continue

            if mod_name == '__init__':
                self.log.warning("Found __init__.%s at root of %s", ext, filepath)

            self.log.debug("Reading %s from %s", zip_info.filename, filepath)

            if not might_contain_dag(zip_info.filename, safe_mode, current_zip_file):
                # todo: create ignore list
                # Don't want to spam user with skip messages
                # BUGFIX: was `if not self.has_logged or True:` which is always
                # true and logged for every skipped file, defeating the guard
                # (compare _load_modules_from_file above).
                if not self.has_logged:
                    self.has_logged = True
                    self.log.info(
                        "File %s:%s assumed to contain no DAGs. Skipping.", filepath, zip_info.filename
                    )
                continue

            if mod_name in sys.modules:
                del sys.modules[mod_name]

            try:
                sys.path.insert(0, filepath)
                current_module = importlib.import_module(mod_name)
                mods.append(current_module)
            except Exception as e:  # pylint: disable=broad-except
                self.log.exception("Failed to import: %s", filepath)
                if self.dagbag_import_error_tracebacks:
                    self.import_errors[filepath] = traceback.format_exc(
                        limit=-self.dagbag_import_error_traceback_depth
                    )
                else:
                    self.import_errors[filepath] = str(e)
        return mods

    def _process_modules(self, filepath, mods, file_last_changed_on_disk):
        """Find DAG objects in the imported modules and bag the valid ones."""
        from airflow.models.dag import DAG  # Avoid circular import

        is_zipfile = zipfile.is_zipfile(filepath)
        top_level_dags = [o for m in mods for o in list(m.__dict__.values()) if isinstance(o, DAG)]

        found_dags = []

        for dag in top_level_dags:
            if not dag.full_filepath:
                dag.full_filepath = filepath
                if dag.fileloc != filepath and not is_zipfile:
                    dag.fileloc = filepath
            try:
                dag.is_subdag = False
                if isinstance(dag.normalized_schedule_interval, str):
                    croniter(dag.normalized_schedule_interval)
                self.bag_dag(dag=dag, root_dag=dag)
                found_dags.append(dag)
                found_dags += dag.subdags
            except (CroniterBadCronError, CroniterBadDateError, CroniterNotAlphaError) as cron_e:
                self.log.exception("Failed to bag_dag: %s", dag.full_filepath)
                self.import_errors[dag.full_filepath] = f"Invalid Cron expression: {cron_e}"
                self.file_last_changed[dag.full_filepath] = file_last_changed_on_disk
            except (AirflowDagCycleException, AirflowClusterPolicyViolation) as exception:
                self.log.exception("Failed to bag_dag: %s", dag.full_filepath)
                self.import_errors[dag.full_filepath] = str(exception)
                self.file_last_changed[dag.full_filepath] = file_last_changed_on_disk
        return found_dags

    def bag_dag(self, dag, root_dag):
        """
        Adds the DAG into the bag, recurses into sub dags.

        Throws AirflowDagCycleException if a cycle is detected in this dag or its subdags
        """
        test_cycle(dag)  # throws if a task cycle is found

        dag.resolve_template_files()
        dag.last_loaded = timezone.utcnow()

        # Check policies
        settings.dag_policy(dag)

        for task in dag.tasks:
            settings.task_policy(task)

        subdags = dag.subdags

        try:
            for subdag in subdags:
                subdag.full_filepath = dag.full_filepath
                subdag.parent_dag = dag
                subdag.is_subdag = True
                self.bag_dag(dag=subdag, root_dag=root_dag)

            self.dags[dag.dag_id] = dag
            self.log.debug('Loaded DAG %s', dag)
        except AirflowDagCycleException as cycle_exception:
            # There was an error in bagging the dag. Remove it from the list of dags
            self.log.exception('Exception bagging dag: %s', dag.dag_id)
            # Only necessary at the root level since DAG.subdags automatically
            # performs DFS to search through all subdags
            if dag == root_dag:
                for subdag in subdags:
                    if subdag.dag_id in self.dags:
                        del self.dags[subdag.dag_id]
            raise cycle_exception

    def collect_dags(
        self,
        dag_folder=None,
        only_if_updated=True,
        include_examples=conf.getboolean('core', 'LOAD_EXAMPLES'),
        include_smart_sensor=conf.getboolean('smart_sensor', 'USE_SMART_SENSOR'),
        safe_mode=conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE'),
    ):
        """
        Given a file path or a folder, this method looks for python modules,
        imports them and adds them to the dagbag collection.

        Note that if a ``.airflowignore`` file is found while processing
        the directory, it will behave much like a ``.gitignore``,
        ignoring files that match any of the regex patterns specified
        in the file.

        **Note**: The patterns in .airflowignore are treated as
        un-anchored regexes, not shell-like glob patterns.
        """
        if self.read_dags_from_db:
            return

        self.log.info("Filling up the DagBag from %s", dag_folder)
        dag_folder = dag_folder or self.dag_folder
        # Used to store stats around DagBag processing
        stats = []

        dag_folder = correct_maybe_zipped(dag_folder)
        for filepath in list_py_file_paths(
            dag_folder,
            safe_mode=safe_mode,
            include_examples=include_examples,
            include_smart_sensor=include_smart_sensor,
        ):
            try:
                file_parse_start_dttm = timezone.utcnow()
                found_dags = self.process_file(filepath, only_if_updated=only_if_updated, safe_mode=safe_mode)

                file_parse_end_dttm = timezone.utcnow()
                stats.append(
                    FileLoadStat(
                        file=filepath.replace(settings.DAGS_FOLDER, ''),
                        duration=file_parse_end_dttm - file_parse_start_dttm,
                        dag_num=len(found_dags),
                        task_num=sum([len(dag.tasks) for dag in found_dags]),
                        dags=str([dag.dag_id for dag in found_dags]),
                    )
                )
            except Exception as e:  # pylint: disable=broad-except
                self.log.exception(e)

        self.dagbag_stats = sorted(stats, key=lambda x: x.duration, reverse=True)

    def collect_dags_from_db(self):
        """Collects DAGs from database."""
        from airflow.models.serialized_dag import SerializedDagModel

        with Stats.timer('collect_db_dags'):
            self.log.info("Filling up the DagBag from database")

            # The dagbag contains all rows in serialized_dag table. Deleted DAGs are deleted
            # from the table by the scheduler job.
            self.dags = SerializedDagModel.read_all_dags()

            # Adds subdags.
            # DAG post-processing steps such as self.bag_dag and croniter are not needed as
            # they are done by scheduler before serialization.
            subdags = {}
            for dag in self.dags.values():
                for subdag in dag.subdags:
                    subdags[subdag.dag_id] = subdag
            self.dags.update(subdags)

    def dagbag_report(self):
        """Prints a report around DagBag loading stats"""
        stats = self.dagbag_stats
        dag_folder = self.dag_folder
        duration = sum([o.duration for o in stats], timedelta()).total_seconds()
        dag_num = sum([o.dag_num for o in stats])
        task_num = sum([o.task_num for o in stats])
        table = tabulate(stats, headers="keys")

        report = textwrap.dedent(
            f"""\n
        -------------------------------------------------------------------
        DagBag loading stats for {dag_folder}
        -------------------------------------------------------------------
        Number of DAGs: {dag_num}
        Total task number: {task_num}
        DagBag parsing time: {duration}
        {table}
        """
        )
        return report

    @provide_session
    def sync_to_db(self, session: Optional[Session] = None):
        """Save attributes about list of DAG to the DB."""
        # To avoid circular import - airflow.models.dagbag -> airflow.models.dag -> airflow.models.dagbag
        from airflow.models.dag import DAG
        from airflow.models.serialized_dag import SerializedDagModel

        def _serialize_dag_capturing_errors(dag, session):
            """
            Try to serialize the dag to the DB, but make a note of any errors.

            We can't place them directly in import_errors, as this may be retried, and work the next time
            """
            if dag.is_subdag:
                return []
            try:
                # We cant use bulk_write_to_db as we want to capture each error individually
                SerializedDagModel.write_dag(
                    dag,
                    min_update_interval=settings.MIN_SERIALIZED_DAG_UPDATE_INTERVAL,
                    session=session,
                )
                return []
            except OperationalError:
                raise
            except Exception:  # pylint: disable=broad-except
                return [(dag.fileloc, traceback.format_exc(limit=-self.dagbag_import_error_traceback_depth))]

        # Retry 'DAG.bulk_write_to_db' & 'SerializedDagModel.bulk_sync_to_db' in case
        # of any Operational Errors
        # In case of failures, provide_session handles rollback
        for attempt in tenacity.Retrying(
            retry=tenacity.retry_if_exception_type(exception_types=OperationalError),
            wait=tenacity.wait_random_exponential(multiplier=0.5, max=5),
            stop=tenacity.stop_after_attempt(settings.MAX_DB_RETRIES),
            before_sleep=tenacity.before_sleep_log(self.log, logging.DEBUG),
            reraise=True,
        ):
            with attempt:
                serialize_errors = []
                self.log.debug(
                    "Running dagbag.sync_to_db with retries. Try %d of %d",
                    attempt.retry_state.attempt_number,
                    settings.MAX_DB_RETRIES,
                )
                self.log.debug("Calling the DAG.bulk_sync_to_db method")
                try:
                    # Write Serialized DAGs to DB, capturing errors
                    for dag in self.dags.values():
                        serialize_errors.extend(_serialize_dag_capturing_errors(dag, session))

                    DAG.bulk_write_to_db(self.dags.values(), session=session)
                except OperationalError:
                    session.rollback()
                    raise
                # Only now we are "complete" do we update import_errors - don't want to record errors from
                # previous failed attempts
                self.import_errors.update(dict(serialize_errors))
|
PypiClean
|
/PaperCV-0.3.tar.gz/PaperCV-0.3/papercv/citation.py
|
import ads
class papers:
def __init__(self, orcid_id, username, sort_by='year', filename='citation_list'):
self.orcid_id = orcid_id
self.username = username
self.sort_by = sort_by
self.filename = filename
def query_ADS(self):
self.first_paper_data = list(ads.SearchQuery(orcid=self.orcid_id, first_author=self.username, sort=self.sort_by,
fl=['author', 'title', 'pub', 'year', 'volume', 'page', 'citation_count']))
self.nth_paper_data = list(ads.SearchQuery(orcid=self.orcid_id, sort=self.sort_by,
fl=['author', 'title', 'pub', 'year', 'volume', 'page', 'citation_count']))
def generate_citation(self):
# NOTE: Provide option for number of authors to be displayed -- print et al. after that number
# NOTE: Formatting for nth author
self.query_ADS()
# First Author Paper Data and Citation Building
first_paper_citations = []
first_paper_titles = []
for index in range(len(self.first_paper_data)):
citation_string = ''
if(self.first_paper_data[index].year!=None and self.first_paper_data[index].pub!=None and
self.first_paper_data[index].volume!=None and self.first_paper_data[index].page[0]!=None and self.first_paper_data[index].title[0]!=None):
paper_title = self.first_paper_data[index].title[0]
for i, author_name in enumerate(self.first_paper_data[index].author):
split_name = author_name.split(',')
if(len(split_name)==1):
split_name.append(split_name[0][0]+' '+split_name[0][1])
split_last_name = split_name[1].split(' ')
if(len(split_last_name)==3):
citation_string += split_name[0]+', '+split_last_name[1][0]+'. '+split_last_name[2][0]+'., '
else:
citation_string += split_name[0]+', '+split_last_name[1][0]+'., '
citation_string += self.first_paper_data[index].year+', '+self.first_paper_data[index].pub+', '+self.first_paper_data[index].volume+', '+self.first_paper_data[index].page[0]
first_paper_citations.append(citation_string)
first_paper_titles.append(paper_title)
self.first_paper_citations = first_paper_citations
self.first_paper_titles = first_paper_titles
############################################
# nth Author Paper Data and Citation Building
nth_paper_citations = []
nth_paper_titles = []
for index in range(len(self.nth_paper_data)):
citation_string = ''
if(self.nth_paper_data[index].year!=None and self.nth_paper_data[index].pub!=None and
self.nth_paper_data[index].volume!=None and self.nth_paper_data[index].page[0]!=None and self.nth_paper_data[index].title[0]!=None):
paper_title = self.nth_paper_data[index].title[0]
for i, author_name in enumerate(self.nth_paper_data[index].author):
split_name = author_name.split(',')
if(len(split_name)==1):
split_name.append(split_name[0][0]+' '+split_name[0][1])
split_last_name = split_name[1].split(' ')
if(len(split_last_name)==3):
citation_string += split_name[0]+', '+split_last_name[1][0]+'. '+split_last_name[2][0]+'., '
else:
citation_string += split_name[0]+', '+split_last_name[1][0]+'., '
citation_string += self.nth_paper_data[index].year+', '+self.nth_paper_data[index].pub+', '+self.nth_paper_data[index].volume+', '+self.nth_paper_data[index].page[0]
if(paper_title not in self.first_paper_titles):
nth_paper_citations.append(citation_string)
nth_paper_titles.append(paper_title)
self.nth_paper_citations = nth_paper_citations
self.nth_paper_titles = nth_paper_titles
def write_citation(self):
    """Write the collected titles and citations to two text files.

    Produces ``<filename>_first_author.txt`` and ``<filename>_nth_author.txt``,
    each holding title/citation pairs separated by a blank line.

    Requires that generate_citation() has already populated
    self.first_paper_titles / self.first_paper_citations and
    self.nth_paper_titles / self.nth_paper_citations.
    """
    # Context managers guarantee the files are closed even if a write fails
    # (the original used open()/close() and leaked the handle on error).
    with open(self.filename + '_first_author.txt', "w") as cite_file:
        for title, citation in zip(self.first_paper_titles, self.first_paper_citations):
            cite_file.write(title + '\n' + citation + '\n\n')
    with open(self.filename + '_nth_author.txt', "w") as cite_file:
        for title, citation in zip(self.nth_paper_titles, self.nth_paper_citations):
            cite_file.write(title + '\n' + citation + '\n\n')
def create_citation_file(orcid_id, username, sort_by='year', filename='citation_list'):
    """Create .txt files with paper titles and citations for one author.

    Queries the author's papers by ORCID iD, builds citation strings, and
    writes them out via ``papers.write_citation``.

    Args:
        orcid_id (str): Unique ORCID identifier used to search for papers.
        username (str): Author name in 'Last Name, First Name/First Initial'
            format, e.g. 'Sanghi, Aniket' or 'Shah, N.'.
        sort_by (str): Sort the queried papers by 'year' or 'citation_count'.
        filename (str): Base filename for the generated .txt files.

    Returns:
        None. Side effect: writes '<filename>_first_author.txt' and
        '<filename>_nth_author.txt' with titles and citations.
    """
    author = papers(
        orcid_id=orcid_id,
        username=username,
        sort_by=sort_by,
        filename=filename,
    )
    author.generate_citation()
    author.write_citation()
|
PypiClean
|
/GCMlib-1.0.4.tar.gz/GCMlib-1.0.4/gcm.py
|
import os
import base64 as b64
import json
from alive_progress import alive_bar
#AES stoof
from Crypto.Cipher import AES
from Crypto.Random import random
#KeyGen | kdf
import argon2
# AES-GCM "header" (associated data): authenticated by the GCM tag but not
# encrypted; stringE() stores it base64-encoded alongside the ciphertext.
# NOTE(review): os.urandom(8) is interpolated as the *repr* of a bytes object,
# so the header differs between program runs — decryption still works because
# the exact header bytes are embedded in each encrypted blob. Confirm the
# run-to-run variation is intentional.
header = f"Encrypted using GCMlib. DO NOT TAMPER WITH. | Made by therealOri | {os.urandom(8)}"
header = bytes(header, 'utf-8')
# clearing terminal.
def clear():
    """Clear the terminal screen on both POSIX and Windows shells."""
    # "clear" succeeds on POSIX; if it is missing (Windows), the shell
    # falls through to "cls".
    os.system("clear||cls")
# Make master key for encrypting stuff.
def keygen(master):
    """Derive a 32-byte AES key from *master* using Argon2id.

    Args:
        master (bytes | str): Password material; must be at least 100
            characters, otherwise the user is prompted and None is returned.

    Returns:
        bytes | None: Raw 32-byte key (base64-encode it yourself if you want
        a "shareable key"), or None when *master* is too short.
    """
    if len(master) < 100:
        clear()
        # Typo fix: the original prompt said "eneter".
        input('Password/characters used must be 100 characters in length or more!\n\nPress "enter" to continue...')
        clear()
        return None
    # Fresh random salt per call. NOTE(review): the salt is not returned or
    # stored, so the same key cannot be re-derived later from the password
    # alone — confirm callers only ever use the returned key directly.
    salt = os.urandom(16)
    # derive | DO NOT MESS WITH...unless you know what you are doing and or
    # have more than 8GB of ram to spare and a really good CPU.
    print("Generating key...")
    with alive_bar(0) as bar:
        key = argon2.hash_password_raw(
            time_cost=16,
            memory_cost=2**20,  # ~1 GiB of memory per derivation
            parallelism=4,
            hash_len=32,        # AES-256 key size
            password=master,
            salt=salt,
            type=argon2.Type.ID
        )
        bar()
    clear()
    return key
# Encrypting the passwords with master key and AES encryption.
def stringE(*, enc_data, key):
    """Encrypt *enc_data* with AES-GCM under *key*.

    Returns a base64 string wrapping a JSON object whose 'nonce', 'header',
    'ciphertext' and 'tag' fields are themselves base64-encoded.
    """
    cipher = AES.new(key, AES.MODE_GCM)
    cipher.update(header)  # authenticate (but do not encrypt) the header
    ciphertext, tag = cipher.encrypt_and_digest(enc_data)
    field_names = ['nonce', 'header', 'ciphertext', 'tag']
    field_values = [cipher.nonce, header, ciphertext, tag]
    payload = {name: b64.b64encode(value).decode('utf-8')
               for name, value in zip(field_names, field_values)}
    blob = json.dumps(payload)
    return b64.b64encode(bytes(blob, 'utf-8')).decode()
# Decrypting the passwords/data with master key.
def stringD(*, dcr_data, key):
    """Decrypt a blob produced by stringE() and verify its GCM tag.

    Returns the plaintext bytes, or None (after prompting the user) when the
    blob is malformed or authentication fails.
    """
    try:
        raw_json = b64.b64decode(dcr_data)
        decoded = json.loads(raw_json)
        parts = {name: b64.b64decode(decoded[name])
                 for name in ('nonce', 'header', 'ciphertext', 'tag')}
        cipher = AES.new(key, AES.MODE_GCM, nonce=parts['nonce'])
        cipher.update(parts['header'])
        # decrypt_and_verify raises ValueError if the tag does not match.
        return cipher.decrypt_and_verify(parts['ciphertext'], parts['tag'])
    except (ValueError, KeyError) as e:
        print(f'Oops, an error has occured: "{e}".\n')
        input("Incorrect data given, or Data has been tampered with. Can't decrypt.\n\nPress 'enter' to continue...")
        clear()
        return None
# Library module: nothing to do when executed directly.
if __name__ == '__main__':
    pass
|
PypiClean
|
/py-pure-client-1.38.0.tar.gz/py-pure-client-1.38.0/pypureclient/flasharray/FA_2_6/models/reference.py
|
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_6 import models
class Reference(object):
    """
    Swagger-generated model with 'id' and 'name' fields, supporting both
    attribute and dict-style access.

    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute name -> declared swagger type.
    swagger_types = {
        'id': 'str',
        'name': 'str'
    }
    # Attribute name -> JSON key used on the wire.
    attribute_map = {
        'id': 'id',
        'name': 'name'
    }
    # No constructor arguments are mandatory for this model.
    required_args = {
    }

    def __init__(
        self,
        id=None,  # type: str
        name=None,  # type: str
    ):
        """
        Keyword args:
            id (str): A globally unique, system-generated ID. The ID cannot be modified.
            name (str): The resource name, such as volume name, pod name, snapshot name, and so on.
        """
        # Only assign attributes that were explicitly supplied, so hasattr()
        # distinguishes "unset" from "set to None" in to_dict().
        if id is not None:
            self.id = id
        if name is not None:
            self.name = name

    def __setattr__(self, key, value):
        # Reject attributes that are not part of the swagger definition.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `Reference`".format(key))
        self.__dict__[key] = value

    def __getattribute__(self, item):
        # Class-level Property placeholders (from ....properties) read as
        # missing attributes rather than returning the placeholder object.
        value = object.__getattribute__(self, item)
        if isinstance(value, Property):
            raise AttributeError
        else:
            return value

    def __getitem__(self, key):
        # Dict-style read access, limited to declared attributes.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `Reference`".format(key))
        return object.__getattribute__(self, key)

    def __setitem__(self, key, value):
        # Dict-style write access, limited to declared attributes.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `Reference`".format(key))
        object.__setattr__(self, key, value)

    def __delitem__(self, key):
        # Dict-style deletion, limited to declared attributes.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `Reference`".format(key))
        object.__delattr__(self, key)

    def keys(self):
        # Mirrors dict.keys() so instances support dict(**instance)-style use.
        return self.attribute_map.keys()

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            if hasattr(self, attr):
                value = getattr(self, attr)
                if isinstance(value, list):
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        if issubclass(Reference, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, Reference):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
PypiClean
|
/comfylib-0.0.5-py3-none-any.whl/comfy/hazard/ldm/models/diffusion/ddpm.py
|
import torch
import torch.nn as nn
import numpy as np
# import pytorch_lightning as pl
from torch.optim.lr_scheduler import LambdaLR
from einops import rearrange, repeat
from contextlib import contextmanager, nullcontext
from functools import partial
import itertools
from tqdm import tqdm
from torchvision.utils import make_grid
# from pytorch_lightning.utilities.distributed import rank_zero_only
from omegaconf import ListConfig
from comfy.hazard.ldm.util import log_txt_as_img, exists, default, ismap, isimage, mean_flat, count_params, instantiate_from_config
from comfy.hazard.ldm.modules.ema import LitEma
from comfy.hazard.ldm.modules.distributions.distributions import normal_kl, DiagonalGaussianDistribution
from comfy.hazard.ldm.models.autoencoder import IdentityFirstStage, AutoencoderKL
from comfy.hazard.ldm.modules.diffusionmodules.util import make_beta_schedule, extract_into_tensor, noise_like
from comfy.hazard.ldm.models.diffusion.ddim import DDIMSampler
# Conditioning mode -> the kwarg name used when feeding conditioning tensors
# to the UNet ('concat' along channels, 'crossattn' context, or 'adm' labels).
__conditioning_keys__ = {'concat': 'c_concat',
                         'crossattn': 'c_crossattn',
                         'adm': 'y'}
def disabled_train(self, mode=True):
    """Replacement for ``nn.Module.train`` that ignores mode changes.

    Assigning this function as a module's ``train`` method pins the module in
    its current train/eval state; *mode* is accepted only for signature
    compatibility and is ignored.
    """
    # Deliberate no-op: return the module unchanged.
    return self
def uniform_on_device(r1, r2, shape, device):
    """Sample a tensor of *shape* uniformly from [r2, r1) on *device*."""
    span = r1 - r2
    return span * torch.rand(*shape, device=device) + r2
# class DDPM(pl.LightningModule):
class DDPM(torch.nn.Module):
    # classic DDPM with Gaussian diffusion, in image space
    def __init__(self,
                 unet_config,
                 timesteps=1000,
                 beta_schedule="linear",
                 loss_type="l2",
                 ckpt_path=None,
                 ignore_keys=[],  # NOTE(review): mutable default; never mutated here, but fragile
                 load_only_unet=False,
                 monitor="val/loss",
                 use_ema=True,
                 first_stage_key="image",
                 image_size=256,
                 channels=3,
                 log_every_t=100,
                 clip_denoised=True,
                 linear_start=1e-4,
                 linear_end=2e-2,
                 cosine_s=8e-3,
                 given_betas=None,
                 original_elbo_weight=0.,
                 v_posterior=0.,  # weight for choosing posterior variance as sigma = (1-v) * beta_tilde + v * beta
                 l_simple_weight=1.,
                 conditioning_key=None,
                 parameterization="eps",  # all assuming fixed variance schedules
                 scheduler_config=None,
                 use_positional_encodings=False,
                 learn_logvar=False,
                 logvar_init=0.,
                 make_it_fit=False,
                 ucg_training=None,
                 reset_ema=False,
                 reset_num_ema_updates=False,
                 ):
        """Classic DDPM with Gaussian diffusion in image space.

        Key arguments:
            unet_config: Config handed to DiffusionWrapper to build the UNet.
            timesteps (int): Number of diffusion steps T.
            beta_schedule (str): Name of the noise schedule.
            loss_type (str): 'l1' or 'l2' (see get_loss).
            parameterization (str): Prediction target: 'eps', 'x0' or 'v'.
            ckpt_path (str | None): Optional checkpoint to restore from.
            ucg_training (dict | None): Per-key unconditional-guidance dropout
                config used by training_step.
        """
        super().__init__()
        assert parameterization in ["eps", "x0", "v"], 'currently only supporting "eps" and "x0" and "v"'
        self.parameterization = parameterization
        print(f"{self.__class__.__name__}: Running in {self.parameterization}-prediction mode")
        self.cond_stage_model = None
        self.clip_denoised = clip_denoised
        self.log_every_t = log_every_t
        self.first_stage_key = first_stage_key
        self.image_size = image_size  # try conv?
        self.channels = channels
        self.use_positional_encodings = use_positional_encodings
        # The wrapped UNet; DiffusionWrapper routes conditioning per conditioning_key.
        self.model = DiffusionWrapper(unet_config, conditioning_key)
        count_params(self.model, verbose=True)
        self.use_ema = use_ema
        if self.use_ema:
            self.model_ema = LitEma(self.model)
            print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.")
        self.use_scheduler = scheduler_config is not None
        if self.use_scheduler:
            self.scheduler_config = scheduler_config
        self.v_posterior = v_posterior
        self.original_elbo_weight = original_elbo_weight
        self.l_simple_weight = l_simple_weight
        if monitor is not None:
            self.monitor = monitor
        self.make_it_fit = make_it_fit
        if reset_ema: assert exists(ckpt_path)
        if ckpt_path is not None:
            self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys, only_model=load_only_unet)
            if reset_ema:
                assert self.use_ema
                print(f"Resetting ema to pure model weights. This is useful when restoring from an ema-only checkpoint.")
                self.model_ema = LitEma(self.model)
        if reset_num_ema_updates:
            print(" +++++++++++ WARNING: RESETTING NUM_EMA UPDATES TO ZERO +++++++++++ ")
            assert self.use_ema
            self.model_ema.reset_num_updates()
        # Precompute all schedule buffers (betas, alphas_cumprod, ...).
        self.register_schedule(given_betas=given_betas, beta_schedule=beta_schedule, timesteps=timesteps,
                               linear_start=linear_start, linear_end=linear_end, cosine_s=cosine_s)
        self.loss_type = loss_type
        self.learn_logvar = learn_logvar
        # Per-timestep log-variance; becomes a trainable Parameter if learn_logvar.
        self.logvar = torch.full(fill_value=logvar_init, size=(self.num_timesteps,))
        if self.learn_logvar:
            self.logvar = nn.Parameter(self.logvar, requires_grad=True)
        self.ucg_training = ucg_training or dict()
        if self.ucg_training:
            self.ucg_prng = np.random.RandomState()
def register_schedule(self, given_betas=None, beta_schedule="linear", timesteps=1000,
                      linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):
    """Precompute the diffusion schedule and register it as (non-)persistent buffers."""
    if exists(given_betas):
        betas = given_betas
    else:
        betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end,
                                   cosine_s=cosine_s)
    alphas = 1. - betas
    alphas_cumprod = np.cumprod(alphas, axis=0)
    # alpha_bar_{t-1}, with alpha_bar_{-1} defined as 1.
    alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1])
    timesteps, = betas.shape
    self.num_timesteps = int(timesteps)
    self.linear_start = linear_start
    self.linear_end = linear_end
    assert alphas_cumprod.shape[0] == self.num_timesteps, 'alphas have to be defined for each timestep'
    to_torch = partial(torch.tensor, dtype=torch.float32)
    self.register_buffer('betas', to_torch(betas))
    self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))
    self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev))
    # calculations for diffusion q(x_t | x_{t-1}) and others
    self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod)))
    self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod)))
    self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod)))
    self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod)))
    self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1)))
    # calculations for posterior q(x_{t-1} | x_t, x_0)
    # v_posterior interpolates between beta_tilde (v=0) and beta (v=1).
    posterior_variance = (1 - self.v_posterior) * betas * (1. - alphas_cumprod_prev) / (
            1. - alphas_cumprod) + self.v_posterior * betas
    # above: equal to 1. / (1. / (1. - alpha_cumprod_tm1) + alpha_t / beta_t)
    self.register_buffer('posterior_variance', to_torch(posterior_variance))
    # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain
    self.register_buffer('posterior_log_variance_clipped', to_torch(np.log(np.maximum(posterior_variance, 1e-20))))
    self.register_buffer('posterior_mean_coef1', to_torch(
        betas * np.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod)))
    self.register_buffer('posterior_mean_coef2', to_torch(
        (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. - alphas_cumprod)))
    # Per-timestep reweighting of the VLB loss term, per parameterization.
    if self.parameterization == "eps":
        lvlb_weights = self.betas ** 2 / (
                2 * self.posterior_variance * to_torch(alphas) * (1 - self.alphas_cumprod))
    elif self.parameterization == "x0":
        lvlb_weights = 0.5 * np.sqrt(torch.Tensor(alphas_cumprod)) / (2. * 1 - torch.Tensor(alphas_cumprod))
    elif self.parameterization == "v":
        lvlb_weights = torch.ones_like(self.betas ** 2 / (
                2 * self.posterior_variance * to_torch(alphas) * (1 - self.alphas_cumprod)))
    else:
        raise NotImplementedError("mu not supported")
    # First weight is infinite/undefined for eps; copy from t=1.
    lvlb_weights[0] = lvlb_weights[1]
    self.register_buffer('lvlb_weights', lvlb_weights, persistent=False)
    assert not torch.isnan(self.lvlb_weights).all()
@contextmanager
def ema_scope(self, context=None):
    """Context manager that temporarily swaps in the EMA weights.

    On entry the live weights are stored and replaced by the EMA copy; on
    exit (including on exceptions) the live training weights are restored.
    No-op when use_ema is False.
    """
    if self.use_ema:
        self.model_ema.store(self.model.parameters())
        self.model_ema.copy_to(self.model)
        if context is not None:
            print(f"{context}: Switched to EMA weights")
    try:
        yield None
    finally:
        if self.use_ema:
            self.model_ema.restore(self.model.parameters())
            if context is not None:
                print(f"{context}: Restored training weights")
@torch.no_grad()
def init_from_ckpt(self, path, ignore_keys=list(), only_model=False):
    """Load weights from a checkpoint, optionally adapting mismatched shapes.

    Args:
        path: Checkpoint file readable by torch.load.
        ignore_keys: State-dict key prefixes to drop before loading.
        only_model: Load into self.model only, instead of the whole module.
    """
    sd = torch.load(path, map_location="cpu")
    if "state_dict" in list(sd.keys()):
        sd = sd["state_dict"]
    keys = list(sd.keys())
    for k in keys:
        for ik in ignore_keys:
            if k.startswith(ik):
                print("Deleting key {} from state_dict.".format(k))
                del sd[k]
    if self.make_it_fit:
        # Tile old weights cyclically along the first two axes so a
        # checkpoint with different channel counts can still initialize us.
        n_params = len([name for name, _ in
                        itertools.chain(self.named_parameters(),
                                        self.named_buffers())])
        for name, param in tqdm(
                itertools.chain(self.named_parameters(),
                                self.named_buffers()),
                desc="Fitting old weights to new weights",
                total=n_params
        ):
            if not name in sd:
                continue
            old_shape = sd[name].shape
            new_shape = param.shape
            assert len(old_shape) == len(new_shape)
            if len(new_shape) > 2:
                # we only modify first two axes
                assert new_shape[2:] == old_shape[2:]
            # assumes first axis corresponds to output dim
            if not new_shape == old_shape:
                new_param = param.clone()
                old_param = sd[name]
                if len(new_shape) == 1:
                    for i in range(new_param.shape[0]):
                        new_param[i] = old_param[i % old_shape[0]]
                elif len(new_shape) >= 2:
                    for i in range(new_param.shape[0]):
                        for j in range(new_param.shape[1]):
                            new_param[i, j] = old_param[i % old_shape[0], j % old_shape[1]]
                    # Normalize by how often each old input column was reused,
                    # so the expected activation magnitude is preserved.
                    n_used_old = torch.ones(old_shape[1])
                    for j in range(new_param.shape[1]):
                        n_used_old[j % old_shape[1]] += 1
                    n_used_new = torch.zeros(new_shape[1])
                    for j in range(new_param.shape[1]):
                        n_used_new[j] = n_used_old[j % old_shape[1]]
                    n_used_new = n_used_new[None, :]
                    while len(n_used_new.shape) < len(new_shape):
                        n_used_new = n_used_new.unsqueeze(-1)
                    new_param /= n_used_new
                sd[name] = new_param
    missing, unexpected = self.load_state_dict(sd, strict=False) if not only_model else self.model.load_state_dict(
        sd, strict=False)
    print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys")
    if len(missing) > 0:
        print(f"Missing Keys:\n {missing}")
    if len(unexpected) > 0:
        print(f"\nUnexpected Keys:\n {unexpected}")
def q_mean_variance(self, x_start, t):
    """
    Get the distribution q(x_t | x_0).
    :param x_start: the [N x C x ...] tensor of noiseless inputs.
    :param t: the number of diffusion steps (minus 1). Here, 0 means one step.
    :return: A tuple (mean, variance, log_variance), all of x_start's shape.
    """
    # mean = sqrt(alpha_bar_t) * x_0 ; variance = 1 - alpha_bar_t
    mean = (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start)
    variance = extract_into_tensor(1.0 - self.alphas_cumprod, t, x_start.shape)
    log_variance = extract_into_tensor(self.log_one_minus_alphas_cumprod, t, x_start.shape)
    return mean, variance, log_variance
def predict_start_from_noise(self, x_t, t, noise):
    """Recover x_0 from x_t and predicted noise (inverts q(x_t | x_0))."""
    return (
        extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t -
        extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) * noise
    )
def predict_start_from_z_and_v(self, x_t, t, v):
    """Recover x_0 from x_t and a v-parameterization prediction."""
    # self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod)))
    # self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod)))
    return (
        extract_into_tensor(self.sqrt_alphas_cumprod, t, x_t.shape) * x_t -
        extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_t.shape) * v
    )
def predict_eps_from_z_and_v(self, x_t, t, v):
    """Recover the noise eps from x_t and a v-parameterization prediction."""
    return (
        extract_into_tensor(self.sqrt_alphas_cumprod, t, x_t.shape) * v +
        extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_t.shape) * x_t
    )
def q_posterior(self, x_start, x_t, t):
    """Mean and (log-)variance of the true posterior q(x_{t-1} | x_t, x_0)."""
    posterior_mean = (
        extract_into_tensor(self.posterior_mean_coef1, t, x_t.shape) * x_start +
        extract_into_tensor(self.posterior_mean_coef2, t, x_t.shape) * x_t
    )
    posterior_variance = extract_into_tensor(self.posterior_variance, t, x_t.shape)
    posterior_log_variance_clipped = extract_into_tensor(self.posterior_log_variance_clipped, t, x_t.shape)
    return posterior_mean, posterior_variance, posterior_log_variance_clipped
def p_mean_variance(self, x, t, clip_denoised: bool):
    """Compute the mean and (log-)variance of p(x_{t-1} | x_t).

    Args:
        x: Noisy sample x_t.
        t: Timestep indices (long tensor).
        clip_denoised: Clamp the reconstructed x_0 to [-1, 1] before
            computing the posterior.

    Returns:
        Tuple (model_mean, posterior_variance, posterior_log_variance).
    """
    model_out = self.model(x, t)
    if self.parameterization == "eps":
        x_recon = self.predict_start_from_noise(x, t=t, noise=model_out)
    elif self.parameterization == "x0":
        x_recon = model_out
    elif self.parameterization == "v":
        # Bug fix: "v" is accepted by __init__ but was not handled here,
        # leaving x_recon unbound (NameError at sampling time).
        x_recon = self.predict_start_from_z_and_v(x, t=t, v=model_out)
    else:
        raise NotImplementedError(f"Parameterization {self.parameterization} not yet supported")
    if clip_denoised:
        x_recon.clamp_(-1., 1.)
    model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t)
    return model_mean, posterior_variance, posterior_log_variance
@torch.no_grad()
def p_sample(self, x, t, clip_denoised=True, repeat_noise=False):
    """One reverse-diffusion step: sample x_{t-1} from p(x_{t-1} | x_t)."""
    b, *_, device = *x.shape, x.device
    model_mean, _, model_log_variance = self.p_mean_variance(x=x, t=t, clip_denoised=clip_denoised)
    noise = noise_like(x.shape, device, repeat_noise)
    # no noise when t == 0
    nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1)))
    return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise
@torch.no_grad()
def p_sample_loop(self, shape, return_intermediates=False):
    """Run the full reverse chain from pure noise down to t = 0."""
    device = self.betas.device
    b = shape[0]
    img = torch.randn(shape, device=device)
    intermediates = [img]
    for i in tqdm(reversed(range(0, self.num_timesteps)), desc='Sampling t', total=self.num_timesteps):
        img = self.p_sample(img, torch.full((b,), i, device=device, dtype=torch.long),
                            clip_denoised=self.clip_denoised)
        # Keep periodic snapshots (plus the final step) for visualization.
        if i % self.log_every_t == 0 or i == self.num_timesteps - 1:
            intermediates.append(img)
    if return_intermediates:
        return img, intermediates
    return img
@torch.no_grad()
def sample(self, batch_size=16, return_intermediates=False):
    """Draw *batch_size* images by running the full reverse diffusion."""
    shape = (batch_size, self.channels, self.image_size, self.image_size)
    return self.p_sample_loop(shape, return_intermediates=return_intermediates)
def q_sample(self, x_start, t, noise=None):
    """Diffuse x_0 forward to step t: x_t = sqrt(a_bar)*x_0 + sqrt(1-a_bar)*eps."""
    noise = default(noise, lambda: torch.randn_like(x_start))
    return (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start +
            extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise)
def get_v(self, x, noise, t):
    """Build the v-parameterization target: sqrt(a_bar)*eps - sqrt(1-a_bar)*x."""
    return (
        extract_into_tensor(self.sqrt_alphas_cumprod, t, x.shape) * noise -
        extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x.shape) * x
    )
def get_loss(self, pred, target, mean=True):
if self.loss_type == 'l1':
loss = (target - pred).abs()
if mean:
loss = loss.mean()
elif self.loss_type == 'l2':
if mean:
loss = torch.nn.functional.mse_loss(target, pred)
else:
loss = torch.nn.functional.mse_loss(target, pred, reduction='none')
else:
raise NotImplementedError("unknown loss type '{loss_type}'")
return loss
def p_losses(self, x_start, t, noise=None):
    """Training losses for a batch of clean images at the given timesteps."""
    noise = default(noise, lambda: torch.randn_like(x_start))
    x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)
    model_out = self.model(x_noisy, t)
    loss_dict = {}
    # The regression target depends on the chosen parameterization.
    if self.parameterization == "eps":
        target = noise
    elif self.parameterization == "x0":
        target = x_start
    elif self.parameterization == "v":
        target = self.get_v(x_start, noise, t)
    else:
        raise NotImplementedError(f"Parameterization {self.parameterization} not yet supported")
    # Per-sample loss: mean over channel/height/width, keep the batch axis.
    loss = self.get_loss(model_out, target, mean=False).mean(dim=[1, 2, 3])
    log_prefix = 'train' if self.training else 'val'
    loss_dict.update({f'{log_prefix}/loss_simple': loss.mean()})
    loss_simple = loss.mean() * self.l_simple_weight
    # Variational lower-bound term, reweighted per timestep.
    loss_vlb = (self.lvlb_weights[t] * loss).mean()
    loss_dict.update({f'{log_prefix}/loss_vlb': loss_vlb})
    loss = loss_simple + self.original_elbo_weight * loss_vlb
    loss_dict.update({f'{log_prefix}/loss': loss})
    return loss, loss_dict
def forward(self, x, *args, **kwargs):
    """Sample random timesteps for the batch and return the diffusion losses."""
    # b, c, h, w, device, img_size, = *x.shape, x.device, self.image_size
    # assert h == img_size and w == img_size, f'height and width of image must be {img_size}'
    # NOTE(review): plain nn.Module has no `device` attribute — this relies
    # on something defined elsewhere (e.g. a property or Lightning); confirm.
    t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=self.device).long()
    return self.p_losses(x, t, *args, **kwargs)
def get_input(self, batch, k):
    """Fetch batch[k] and return it as a contiguous float BCHW tensor.

    A 3-D (B, H, W) input gains a trailing channel axis before the
    BHWC -> BCHW permutation.
    """
    img = batch[k]
    if img.ndim == 3:
        img = img[..., None]  # add singleton channel dimension
    img = rearrange(img, 'b h w c -> b c h w')
    return img.to(memory_format=torch.contiguous_format).float()
def shared_step(self, batch):
    """Common train/val step: pull the image tensor and compute the losses."""
    x = self.get_input(batch, self.first_stage_key)
    loss, loss_dict = self(x)
    return loss, loss_dict
def training_step(self, batch, batch_idx):
    """One optimization step with optional unconditional-guidance dropout.

    NOTE(review): self.log_dict / self.log / self.optimizers / self.global_step
    come from the pytorch_lightning base class that is commented out above —
    confirm a replacement provides them before calling this method.
    """
    # With probability p, replace conditioning entries for key k by `val`
    # (unconditional-guidance training).
    for k in self.ucg_training:
        p = self.ucg_training[k]["p"]
        val = self.ucg_training[k]["val"]
        if val is None:
            val = ""
        for i in range(len(batch[k])):
            if self.ucg_prng.choice(2, p=[1 - p, p]):
                batch[k][i] = val
    loss, loss_dict = self.shared_step(batch)
    self.log_dict(loss_dict, prog_bar=True,
                  logger=True, on_step=True, on_epoch=True)
    self.log("global_step", self.global_step,
             prog_bar=True, logger=True, on_step=True, on_epoch=False)
    if self.use_scheduler:
        lr = self.optimizers().param_groups[0]['lr']
        self.log('lr_abs', lr, prog_bar=True, logger=True, on_step=True, on_epoch=False)
    return loss
@torch.no_grad()
def validation_step(self, batch, batch_idx):
    """Validation step logged twice: with live weights and with EMA weights."""
    _, loss_dict_no_ema = self.shared_step(batch)
    with self.ema_scope():
        _, loss_dict_ema = self.shared_step(batch)
        loss_dict_ema = {key + '_ema': loss_dict_ema[key] for key in loss_dict_ema}
    self.log_dict(loss_dict_no_ema, prog_bar=False, logger=True, on_step=False, on_epoch=True)
    self.log_dict(loss_dict_ema, prog_bar=False, logger=True, on_step=False, on_epoch=True)
def on_train_batch_end(self, *args, **kwargs):
if self.use_ema:
self.model_ema(self.model)
def _get_rows_from_list(self, samples):
    """Stack a list of image batches into a single image grid.

    Each list element becomes one column per row; the grid has one row per
    batch element.
    """
    n_imgs_per_row = len(samples)
    denoise_grid = rearrange(samples, 'n b c h w -> b n c h w')
    denoise_grid = rearrange(denoise_grid, 'b n c h w -> (b n) c h w')
    denoise_grid = make_grid(denoise_grid, nrow=n_imgs_per_row)
    return denoise_grid
@torch.no_grad()
def log_images(self, batch, N=8, n_row=2, sample=True, return_keys=None, **kwargs):
    """Build a dict of visualization tensors: inputs, a forward-diffusion
    row, and (optionally) EMA samples with their denoising trajectory."""
    log = dict()
    x = self.get_input(batch, self.first_stage_key)
    N = min(x.shape[0], N)
    n_row = min(x.shape[0], n_row)
    x = x.to(self.device)[:N]
    log["inputs"] = x
    # get diffusion row
    diffusion_row = list()
    x_start = x[:n_row]
    for t in range(self.num_timesteps):
        if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
            t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
            t = t.to(self.device).long()
            noise = torch.randn_like(x_start)
            x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)
            diffusion_row.append(x_noisy)
    log["diffusion_row"] = self._get_rows_from_list(diffusion_row)
    if sample:
        # get denoise row
        with self.ema_scope("Plotting"):
            samples, denoise_row = self.sample(batch_size=N, return_intermediates=True)
        log["samples"] = samples
        log["denoise_row"] = self._get_rows_from_list(denoise_row)
    if return_keys:
        # Only filter when at least one requested key is actually present.
        if np.intersect1d(list(log.keys()), return_keys).shape[0] == 0:
            return log
        else:
            return {key: log[key] for key in return_keys}
    return log
def configure_optimizers(self):
lr = self.learning_rate
params = list(self.model.parameters())
if self.learn_logvar:
params = params + [self.logvar]
opt = torch.optim.AdamW(params, lr=lr)
return opt
class LatentDiffusion(DDPM):
"""main class"""
def __init__(self,
first_stage_config={},
cond_stage_config={},
num_timesteps_cond=None,
cond_stage_key="image",
cond_stage_trainable=False,
concat_mode=True,
cond_stage_forward=None,
conditioning_key=None,
scale_factor=1.0,
scale_by_std=False,
force_null_conditioning=False,
*args, **kwargs):
self.force_null_conditioning = force_null_conditioning
self.num_timesteps_cond = default(num_timesteps_cond, 1)
self.scale_by_std = scale_by_std
assert self.num_timesteps_cond <= kwargs['timesteps']
# for backwards compatibility after implementation of DiffusionWrapper
if conditioning_key is None:
conditioning_key = 'concat' if concat_mode else 'crossattn'
if cond_stage_config == '__is_unconditional__' and not self.force_null_conditioning:
conditioning_key = None
ckpt_path = kwargs.pop("ckpt_path", None)
reset_ema = kwargs.pop("reset_ema", False)
reset_num_ema_updates = kwargs.pop("reset_num_ema_updates", False)
ignore_keys = kwargs.pop("ignore_keys", [])
super().__init__(conditioning_key=conditioning_key, *args, **kwargs)
self.concat_mode = concat_mode
self.cond_stage_trainable = cond_stage_trainable
self.cond_stage_key = cond_stage_key
try:
self.num_downs = len(first_stage_config.params.ddconfig.ch_mult) - 1
except:
self.num_downs = 0
if not scale_by_std:
self.scale_factor = scale_factor
else:
self.register_buffer('scale_factor', torch.tensor(scale_factor))
# self.instantiate_first_stage(first_stage_config)
# self.instantiate_cond_stage(cond_stage_config)
self.cond_stage_forward = cond_stage_forward
self.clip_denoised = False
self.bbox_tokenizer = None
self.restarted_from_ckpt = False
if ckpt_path is not None:
self.init_from_ckpt(ckpt_path, ignore_keys)
self.restarted_from_ckpt = True
if reset_ema:
assert self.use_ema
print(
f"Resetting ema to pure model weights. This is useful when restoring from an ema-only checkpoint.")
self.model_ema = LitEma(self.model)
if reset_num_ema_updates:
print(" +++++++++++ WARNING: RESETTING NUM_EMA UPDATES TO ZERO +++++++++++ ")
assert self.use_ema
self.model_ema.reset_num_updates()
def make_cond_schedule(self, ):
self.cond_ids = torch.full(size=(self.num_timesteps,), fill_value=self.num_timesteps - 1, dtype=torch.long)
ids = torch.round(torch.linspace(0, self.num_timesteps - 1, self.num_timesteps_cond)).long()
self.cond_ids[:self.num_timesteps_cond] = ids
# @rank_zero_only
@torch.no_grad()
def on_train_batch_start(self, batch, batch_idx, dataloader_idx):
# only for very first batch
if self.scale_by_std and self.current_epoch == 0 and self.global_step == 0 and batch_idx == 0 and not self.restarted_from_ckpt:
assert self.scale_factor == 1., 'rather not use custom rescaling and std-rescaling simultaneously'
# set rescale weight to 1./std of encodings
print("### USING STD-RESCALING ###")
x = super().get_input(batch, self.first_stage_key)
x = x.to(self.device)
encoder_posterior = self.encode_first_stage(x)
z = self.get_first_stage_encoding(encoder_posterior).detach()
del self.scale_factor
self.register_buffer('scale_factor', 1. / z.flatten().std())
print(f"setting self.scale_factor to {self.scale_factor}")
print("### USING STD-RESCALING ###")
def register_schedule(self,
given_betas=None, beta_schedule="linear", timesteps=1000,
linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):
super().register_schedule(given_betas, beta_schedule, timesteps, linear_start, linear_end, cosine_s)
self.shorten_cond_schedule = self.num_timesteps_cond > 1
if self.shorten_cond_schedule:
self.make_cond_schedule()
def instantiate_first_stage(self, config):
model = instantiate_from_config(config)
self.first_stage_model = model.eval()
self.first_stage_model.train = disabled_train
for param in self.first_stage_model.parameters():
param.requires_grad = False
def instantiate_cond_stage(self, config):
if not self.cond_stage_trainable:
if config == "__is_first_stage__":
print("Using first stage also as cond stage.")
self.cond_stage_model = self.first_stage_model
elif config == "__is_unconditional__":
print(f"Training {self.__class__.__name__} as an unconditional model.")
self.cond_stage_model = None
# self.be_unconditional = True
else:
model = instantiate_from_config(config)
self.cond_stage_model = model.eval()
self.cond_stage_model.train = disabled_train
for param in self.cond_stage_model.parameters():
param.requires_grad = False
else:
assert config != '__is_first_stage__'
assert config != '__is_unconditional__'
model = instantiate_from_config(config)
self.cond_stage_model = model
def _get_denoise_row_from_list(self, samples, desc='', force_no_decoder_quantization=False):
denoise_row = []
for zd in tqdm(samples, desc=desc):
denoise_row.append(self.decode_first_stage(zd.to(self.device),
force_not_quantize=force_no_decoder_quantization))
n_imgs_per_row = len(denoise_row)
denoise_row = torch.stack(denoise_row) # n_log_step, n_row, C, H, W
denoise_grid = rearrange(denoise_row, 'n b c h w -> b n c h w')
denoise_grid = rearrange(denoise_grid, 'b n c h w -> (b n) c h w')
denoise_grid = make_grid(denoise_grid, nrow=n_imgs_per_row)
return denoise_grid
def get_first_stage_encoding(self, encoder_posterior):
if isinstance(encoder_posterior, DiagonalGaussianDistribution):
z = encoder_posterior.sample()
elif isinstance(encoder_posterior, torch.Tensor):
z = encoder_posterior
else:
raise NotImplementedError(f"encoder_posterior of type '{type(encoder_posterior)}' not yet implemented")
return self.scale_factor * z
def get_learned_conditioning(self, c):
if self.cond_stage_forward is None:
if hasattr(self.cond_stage_model, 'encode') and callable(self.cond_stage_model.encode):
c = self.cond_stage_model.encode(c)
if isinstance(c, DiagonalGaussianDistribution):
c = c.mode()
else:
c = self.cond_stage_model(c)
else:
assert hasattr(self.cond_stage_model, self.cond_stage_forward)
c = getattr(self.cond_stage_model, self.cond_stage_forward)(c)
return c
def meshgrid(self, h, w):
y = torch.arange(0, h).view(h, 1, 1).repeat(1, w, 1)
x = torch.arange(0, w).view(1, w, 1).repeat(h, 1, 1)
arr = torch.cat([y, x], dim=-1)
return arr
def delta_border(self, h, w):
"""
:param h: height
:param w: width
:return: normalized distance to image border,
wtith min distance = 0 at border and max dist = 0.5 at image center
"""
lower_right_corner = torch.tensor([h - 1, w - 1]).view(1, 1, 2)
arr = self.meshgrid(h, w) / lower_right_corner
dist_left_up = torch.min(arr, dim=-1, keepdims=True)[0]
dist_right_down = torch.min(1 - arr, dim=-1, keepdims=True)[0]
edge_dist = torch.min(torch.cat([dist_left_up, dist_right_down], dim=-1), dim=-1)[0]
return edge_dist
def get_weighting(self, h, w, Ly, Lx, device):
weighting = self.delta_border(h, w)
weighting = torch.clip(weighting, self.split_input_params["clip_min_weight"],
self.split_input_params["clip_max_weight"], )
weighting = weighting.view(1, h * w, 1).repeat(1, 1, Ly * Lx).to(device)
if self.split_input_params["tie_braker"]:
L_weighting = self.delta_border(Ly, Lx)
L_weighting = torch.clip(L_weighting,
self.split_input_params["clip_min_tie_weight"],
self.split_input_params["clip_max_tie_weight"])
L_weighting = L_weighting.view(1, 1, Ly * Lx).to(device)
weighting = weighting * L_weighting
return weighting
def get_fold_unfold(self, x, kernel_size, stride, uf=1, df=1): # todo load once not every time, shorten code
"""
:param x: img of size (bs, c, h, w)
:return: n img crops of size (n, bs, c, kernel_size[0], kernel_size[1])
"""
bs, nc, h, w = x.shape
# number of crops in image
Ly = (h - kernel_size[0]) // stride[0] + 1
Lx = (w - kernel_size[1]) // stride[1] + 1
if uf == 1 and df == 1:
fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride)
unfold = torch.nn.Unfold(**fold_params)
fold = torch.nn.Fold(output_size=x.shape[2:], **fold_params)
weighting = self.get_weighting(kernel_size[0], kernel_size[1], Ly, Lx, x.device).to(x.dtype)
normalization = fold(weighting).view(1, 1, h, w) # normalizes the overlap
weighting = weighting.view((1, 1, kernel_size[0], kernel_size[1], Ly * Lx))
elif uf > 1 and df == 1:
fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride)
unfold = torch.nn.Unfold(**fold_params)
fold_params2 = dict(kernel_size=(kernel_size[0] * uf, kernel_size[0] * uf),
dilation=1, padding=0,
stride=(stride[0] * uf, stride[1] * uf))
fold = torch.nn.Fold(output_size=(x.shape[2] * uf, x.shape[3] * uf), **fold_params2)
weighting = self.get_weighting(kernel_size[0] * uf, kernel_size[1] * uf, Ly, Lx, x.device).to(x.dtype)
normalization = fold(weighting).view(1, 1, h * uf, w * uf) # normalizes the overlap
weighting = weighting.view((1, 1, kernel_size[0] * uf, kernel_size[1] * uf, Ly * Lx))
elif df > 1 and uf == 1:
fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride)
unfold = torch.nn.Unfold(**fold_params)
fold_params2 = dict(kernel_size=(kernel_size[0] // df, kernel_size[0] // df),
dilation=1, padding=0,
stride=(stride[0] // df, stride[1] // df))
fold = torch.nn.Fold(output_size=(x.shape[2] // df, x.shape[3] // df), **fold_params2)
weighting = self.get_weighting(kernel_size[0] // df, kernel_size[1] // df, Ly, Lx, x.device).to(x.dtype)
normalization = fold(weighting).view(1, 1, h // df, w // df) # normalizes the overlap
weighting = weighting.view((1, 1, kernel_size[0] // df, kernel_size[1] // df, Ly * Lx))
else:
raise NotImplementedError
return fold, unfold, normalization, weighting
    @torch.no_grad()
    def get_input(self, batch, k, return_first_stage_outputs=False, force_c_encode=False,
                  cond_key=None, return_original_cond=False, bs=None, return_x=False):
        """Fetch an image batch, encode it to latents and assemble conditioning.

        Returns [z, c], extended (in this order, depending on the flags) with
        the input x and its reconstruction, x again, and the raw conditioning.
        """
        x = super().get_input(batch, k)
        if bs is not None:
            # truncate the batch before the (expensive) first-stage encode
            x = x[:bs]
        x = x.to(self.device)
        encoder_posterior = self.encode_first_stage(x)
        # latents are detached: the first stage is not trained here
        z = self.get_first_stage_encoding(encoder_posterior).detach()
        if self.model.conditioning_key is not None and not self.force_null_conditioning:
            if cond_key is None:
                cond_key = self.cond_stage_key
            if cond_key != self.first_stage_key:
                if cond_key in ['caption', 'coordinates_bbox', "txt"]:
                    xc = batch[cond_key]
                elif cond_key in ['class_label', 'cls']:
                    # class-conditional cond stages consume the whole batch dict
                    xc = batch
                else:
                    xc = super().get_input(batch, cond_key).to(self.device)
            else:
                # conditioning on the input image itself
                xc = x
            if not self.cond_stage_trainable or force_c_encode:
                if isinstance(xc, dict) or isinstance(xc, list):
                    c = self.get_learned_conditioning(xc)
                else:
                    c = self.get_learned_conditioning(xc.to(self.device))
            else:
                # trainable cond stage: encoding happens later, inside forward()
                c = xc
            if bs is not None:
                c = c[:bs]
            if self.use_positional_encodings:
                pos_x, pos_y = self.compute_latent_shifts(batch)
                ckey = __conditioning_keys__[self.model.conditioning_key]
                c = {ckey: c, 'pos_x': pos_x, 'pos_y': pos_y}
        else:
            # unconditional model (or conditioning forced to null)
            c = None
            xc = None
            if self.use_positional_encodings:
                pos_x, pos_y = self.compute_latent_shifts(batch)
                c = {'pos_x': pos_x, 'pos_y': pos_y}
        out = [z, c]
        if return_first_stage_outputs:
            xrec = self.decode_first_stage(z)
            out.extend([x, xrec])
        if return_x:
            out.extend([x])
        if return_original_cond:
            out.append(xc)
        return out
@torch.no_grad()
def decode_first_stage(self, z, predict_cids=False, force_not_quantize=False):
if predict_cids:
if z.dim() == 4:
z = torch.argmax(z.exp(), dim=1).long()
z = self.first_stage_model.quantize.get_codebook_entry(z, shape=None)
z = rearrange(z, 'b h w c -> b c h w').contiguous()
z = 1. / self.scale_factor * z
return self.first_stage_model.decode(z)
@torch.no_grad()
def encode_first_stage(self, x):
return self.first_stage_model.encode(x)
def shared_step(self, batch, **kwargs):
x, c = self.get_input(batch, self.first_stage_key)
loss = self(x, c)
return loss
    def forward(self, x, c, *args, **kwargs):
        # Sample one random diffusion timestep per batch element and compute
        # the denoising loss at that step.
        t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=self.device).long()
        if self.model.conditioning_key is not None:
            assert c is not None
            if self.cond_stage_trainable:
                # cond stage is trained jointly, so encode here (with grads)
                c = self.get_learned_conditioning(c)
            if self.shorten_cond_schedule:  # TODO: drop this option
                # diffuse the conditioning itself to a (shortened-schedule) step
                tc = self.cond_ids[t].to(self.device)
                c = self.q_sample(x_start=c, t=tc, noise=torch.randn_like(c.float()))
        return self.p_losses(x, c, t, *args, **kwargs)
def apply_model(self, x_noisy, t, cond, return_ids=False):
if isinstance(cond, dict):
# hybrid case, cond is expected to be a dict
pass
else:
if not isinstance(cond, list):
cond = [cond]
key = 'c_concat' if self.model.conditioning_key == 'concat' else 'c_crossattn'
cond = {key: cond}
x_recon = self.model(x_noisy, t, **cond)
if isinstance(x_recon, tuple) and not return_ids:
return x_recon[0]
else:
return x_recon
def _predict_eps_from_xstart(self, x_t, t, pred_xstart):
return (extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t - pred_xstart) / \
extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape)
def _prior_bpd(self, x_start):
"""
Get the prior KL term for the variational lower-bound, measured in
bits-per-dim.
This term can't be optimized, as it only depends on the encoder.
:param x_start: the [N x C x ...] tensor of inputs.
:return: a batch of [N] KL values (in bits), one per batch element.
"""
batch_size = x_start.shape[0]
t = torch.tensor([self.num_timesteps - 1] * batch_size, device=x_start.device)
qt_mean, _, qt_log_variance = self.q_mean_variance(x_start, t)
kl_prior = normal_kl(mean1=qt_mean, logvar1=qt_log_variance, mean2=0.0, logvar2=0.0)
return mean_flat(kl_prior) / np.log(2.0)
    def p_losses(self, x_start, cond, t, noise=None):
        """Diffusion training loss for latents `x_start` at timesteps `t`.

        Returns (loss, loss_dict) where loss_dict carries the individual
        (prefixed train/val) loss terms for logging.
        """
        noise = default(noise, lambda: torch.randn_like(x_start))
        x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)
        model_output = self.apply_model(x_noisy, t, cond)
        loss_dict = {}
        prefix = 'train' if self.training else 'val'
        # regression target depends on the chosen parameterization
        if self.parameterization == "x0":
            target = x_start
        elif self.parameterization == "eps":
            target = noise
        elif self.parameterization == "v":
            target = self.get_v(x_start, noise, t)
        else:
            raise NotImplementedError()
        loss_simple = self.get_loss(model_output, target, mean=False).mean([1, 2, 3])
        loss_dict.update({f'{prefix}/loss_simple': loss_simple.mean()})
        # optionally learned per-timestep log-variance reweights the loss
        logvar_t = self.logvar[t].to(self.device)
        loss = loss_simple / torch.exp(logvar_t) + logvar_t
        # loss = loss_simple / torch.exp(self.logvar) + self.logvar
        if self.learn_logvar:
            loss_dict.update({f'{prefix}/loss_gamma': loss.mean()})
            loss_dict.update({'logvar': self.logvar.data.mean()})
        loss = self.l_simple_weight * loss.mean()
        # VLB term, weighted per timestep via lvlb_weights
        loss_vlb = self.get_loss(model_output, target, mean=False).mean(dim=(1, 2, 3))
        loss_vlb = (self.lvlb_weights[t] * loss_vlb).mean()
        loss_dict.update({f'{prefix}/loss_vlb': loss_vlb})
        loss += (self.original_elbo_weight * loss_vlb)
        loss_dict.update({f'{prefix}/loss': loss})
        return loss, loss_dict
    def p_mean_variance(self, x, c, t, clip_denoised: bool, return_codebook_ids=False, quantize_denoised=False,
                        return_x0=False, score_corrector=None, corrector_kwargs=None):
        """Posterior mean/variance of p(x_{t-1} | x_t) from the model's prediction.

        NOTE(review): the "v" parameterization is handled in p_losses but not
        here — ancestral sampling with a v-model would raise. Confirm intended.
        """
        t_in = t
        model_out = self.apply_model(x, t_in, c, return_ids=return_codebook_ids)
        if score_corrector is not None:
            # external score correction is only defined for eps-parameterization
            assert self.parameterization == "eps"
            model_out = score_corrector.modify_score(self, model_out, x, t, c, **corrector_kwargs)
        if return_codebook_ids:
            model_out, logits = model_out
        # recover the predicted clean latent x0
        if self.parameterization == "eps":
            x_recon = self.predict_start_from_noise(x, t=t, noise=model_out)
        elif self.parameterization == "x0":
            x_recon = model_out
        else:
            raise NotImplementedError()
        if clip_denoised:
            x_recon.clamp_(-1., 1.)
        if quantize_denoised:
            # snap the predicted x0 to the first stage's codebook
            x_recon, _, [_, _, indices] = self.first_stage_model.quantize(x_recon)
        model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t)
        if return_codebook_ids:
            return model_mean, posterior_variance, posterior_log_variance, logits
        elif return_x0:
            return model_mean, posterior_variance, posterior_log_variance, x_recon
        else:
            return model_mean, posterior_variance, posterior_log_variance
    @torch.no_grad()
    def p_sample(self, x, c, t, clip_denoised=False, repeat_noise=False,
                 return_codebook_ids=False, quantize_denoised=False, return_x0=False,
                 temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None):
        """Draw x_{t-1} ~ p(x_{t-1} | x_t): one reverse-diffusion step."""
        b, *_, device = *x.shape, x.device
        outputs = self.p_mean_variance(x=x, c=c, t=t, clip_denoised=clip_denoised,
                                       return_codebook_ids=return_codebook_ids,
                                       quantize_denoised=quantize_denoised,
                                       return_x0=return_x0,
                                       score_corrector=score_corrector, corrector_kwargs=corrector_kwargs)
        if return_codebook_ids:
            # NOTE(review): this raise makes the rest of the branch (and the
            # matching return below) unreachable — codebook-id support dropped.
            raise DeprecationWarning("Support dropped.")
            model_mean, _, model_log_variance, logits = outputs
        elif return_x0:
            model_mean, _, model_log_variance, x0 = outputs
        else:
            model_mean, _, model_log_variance = outputs
        noise = noise_like(x.shape, device, repeat_noise) * temperature
        if noise_dropout > 0.:
            noise = torch.nn.functional.dropout(noise, p=noise_dropout)
        # no noise when t == 0
        nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1)))
        if return_codebook_ids:
            return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise, logits.argmax(dim=1)
        if return_x0:
            return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise, x0
        else:
            return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise
    @torch.no_grad()
    def progressive_denoising(self, cond, shape, verbose=True, callback=None, quantize_denoised=False,
                              img_callback=None, mask=None, x0=None, temperature=1., noise_dropout=0.,
                              score_corrector=None, corrector_kwargs=None, batch_size=None, x_T=None, start_T=None,
                              log_every_t=None):
        """Reverse diffusion that collects the model's intermediate x0 predictions.

        :return: (img, intermediates) where intermediates are the x0 snapshots
            recorded every `log_every_t` steps.
        """
        if not log_every_t:
            log_every_t = self.log_every_t
        timesteps = self.num_timesteps
        if batch_size is not None:
            # NOTE(review): inner condition is redundant — batch_size is non-None here
            b = batch_size if batch_size is not None else shape[0]
            shape = [batch_size] + list(shape)
        else:
            b = batch_size = shape[0]
        if x_T is None:
            img = torch.randn(shape, device=self.device)
        else:
            img = x_T
        intermediates = []
        if cond is not None:
            if isinstance(cond, dict):
                # truncate conditioning (entry-wise for list values) to the batch size
                cond = {key: cond[key][:batch_size] if not isinstance(cond[key], list) else
                list(map(lambda x: x[:batch_size], cond[key])) for key in cond}
            else:
                cond = [c[:batch_size] for c in cond] if isinstance(cond, list) else cond[:batch_size]
        if start_T is not None:
            timesteps = min(timesteps, start_T)
        iterator = tqdm(reversed(range(0, timesteps)), desc='Progressive Generation',
                        total=timesteps) if verbose else reversed(
            range(0, timesteps))
        if type(temperature) == float:
            # one temperature per timestep
            temperature = [temperature] * timesteps
        for i in iterator:
            ts = torch.full((b,), i, device=self.device, dtype=torch.long)
            if self.shorten_cond_schedule:
                assert self.model.conditioning_key != 'hybrid'
                tc = self.cond_ids[ts].to(cond.device)
                cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond))
            img, x0_partial = self.p_sample(img, cond, ts,
                                            clip_denoised=self.clip_denoised,
                                            quantize_denoised=quantize_denoised, return_x0=True,
                                            temperature=temperature[i], noise_dropout=noise_dropout,
                                            score_corrector=score_corrector, corrector_kwargs=corrector_kwargs)
            if mask is not None:
                assert x0 is not None
                # keep the known region, noised to the current timestep
                img_orig = self.q_sample(x0, ts)
                img = img_orig * mask + (1. - mask) * img
            if i % log_every_t == 0 or i == timesteps - 1:
                intermediates.append(x0_partial)
            if callback: callback(i)
            if img_callback: img_callback(img, i)
        return img, intermediates
@torch.no_grad()
def p_sample_loop(self, cond, shape, return_intermediates=False,
x_T=None, verbose=True, callback=None, timesteps=None, quantize_denoised=False,
mask=None, x0=None, img_callback=None, start_T=None,
log_every_t=None):
if not log_every_t:
log_every_t = self.log_every_t
device = self.betas.device
b = shape[0]
if x_T is None:
img = torch.randn(shape, device=device)
else:
img = x_T
intermediates = [img]
if timesteps is None:
timesteps = self.num_timesteps
if start_T is not None:
timesteps = min(timesteps, start_T)
iterator = tqdm(reversed(range(0, timesteps)), desc='Sampling t', total=timesteps) if verbose else reversed(
range(0, timesteps))
if mask is not None:
assert x0 is not None
assert x0.shape[2:3] == mask.shape[2:3] # spatial size has to match
for i in iterator:
ts = torch.full((b,), i, device=device, dtype=torch.long)
if self.shorten_cond_schedule:
assert self.model.conditioning_key != 'hybrid'
tc = self.cond_ids[ts].to(cond.device)
cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond))
img = self.p_sample(img, cond, ts,
clip_denoised=self.clip_denoised,
quantize_denoised=quantize_denoised)
if mask is not None:
img_orig = self.q_sample(x0, ts)
img = img_orig * mask + (1. - mask) * img
if i % log_every_t == 0 or i == timesteps - 1:
intermediates.append(img)
if callback: callback(i)
if img_callback: img_callback(img, i)
if return_intermediates:
return img, intermediates
return img
@torch.no_grad()
def sample(self, cond, batch_size=16, return_intermediates=False, x_T=None,
verbose=True, timesteps=None, quantize_denoised=False,
mask=None, x0=None, shape=None, **kwargs):
if shape is None:
shape = (batch_size, self.channels, self.image_size, self.image_size)
if cond is not None:
if isinstance(cond, dict):
cond = {key: cond[key][:batch_size] if not isinstance(cond[key], list) else
list(map(lambda x: x[:batch_size], cond[key])) for key in cond}
else:
cond = [c[:batch_size] for c in cond] if isinstance(cond, list) else cond[:batch_size]
return self.p_sample_loop(cond,
shape,
return_intermediates=return_intermediates, x_T=x_T,
verbose=verbose, timesteps=timesteps, quantize_denoised=quantize_denoised,
mask=mask, x0=x0)
@torch.no_grad()
def sample_log(self, cond, batch_size, ddim, ddim_steps, **kwargs):
if ddim:
ddim_sampler = DDIMSampler(self)
shape = (self.channels, self.image_size, self.image_size)
samples, intermediates = ddim_sampler.sample(ddim_steps, batch_size,
shape, cond, verbose=False, **kwargs)
else:
samples, intermediates = self.sample(cond=cond, batch_size=batch_size,
return_intermediates=True, **kwargs)
return samples, intermediates
    @torch.no_grad()
    def get_unconditional_conditioning(self, batch_size, null_label=None):
        """Build a batch of "null" conditioning for classifier-free guidance.

        Uses `null_label` when given; otherwise falls back to the cond stage's
        own unconditional embedding (class-label models only).
        """
        if null_label is not None:
            xc = null_label
            if isinstance(xc, ListConfig):
                xc = list(xc)
            if isinstance(xc, dict) or isinstance(xc, list):
                c = self.get_learned_conditioning(xc)
            else:
                if hasattr(xc, "to"):
                    xc = xc.to(self.device)
                c = self.get_learned_conditioning(xc)
        else:
            if self.cond_stage_key in ["class_label", "cls"]:
                # cond stage provides its own per-sample null embedding
                xc = self.cond_stage_model.get_unconditional_conditioning(batch_size, device=self.device)
                return self.get_learned_conditioning(xc)
            else:
                raise NotImplementedError("todo")
        if isinstance(c, list):  # in case the encoder gives us a list
            for i in range(len(c)):
                c[i] = repeat(c[i], '1 ... -> b ...', b=batch_size).to(self.device)
        else:
            c = repeat(c, '1 ... -> b ...', b=batch_size).to(self.device)
        return c
    @torch.no_grad()
    def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=50, ddim_eta=0., return_keys=None,
                   quantize_denoised=True, inpaint=True, plot_denoise_rows=False, plot_progressive_rows=True,
                   plot_diffusion_rows=True, unconditional_guidance_scale=1., unconditional_guidance_label=None,
                   use_ema_scope=True,
                   **kwargs):
        """Assemble a dict of visualizations for logging: inputs and
        reconstructions, conditioning, diffusion/denoise rows, samples, CFG
        samples, in/outpainting and progressive rows. Optionally filtered to
        `return_keys`."""
        ema_scope = self.ema_scope if use_ema_scope else nullcontext
        use_ddim = ddim_steps is not None
        log = dict()
        z, c, x, xrec, xc = self.get_input(batch, self.first_stage_key,
                                           return_first_stage_outputs=True,
                                           force_c_encode=True,
                                           return_original_cond=True,
                                           bs=N)
        N = min(x.shape[0], N)
        n_row = min(x.shape[0], n_row)
        log["inputs"] = x
        log["reconstruction"] = xrec
        if self.model.conditioning_key is not None:
            # visualize the conditioning in whatever form it comes
            if hasattr(self.cond_stage_model, "decode"):
                xc = self.cond_stage_model.decode(c)
                log["conditioning"] = xc
            elif self.cond_stage_key in ["caption", "txt"]:
                xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[self.cond_stage_key], size=x.shape[2] // 25)
                log["conditioning"] = xc
            elif self.cond_stage_key in ['class_label', "cls"]:
                try:
                    xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"], size=x.shape[2] // 25)
                    log['conditioning'] = xc
                except KeyError:
                    # probably no "human_label" in batch
                    pass
            elif isimage(xc):
                log["conditioning"] = xc
            if ismap(xc):
                log["original_conditioning"] = self.to_rgb(xc)
        if plot_diffusion_rows:
            # get diffusion row
            diffusion_row = list()
            z_start = z[:n_row]
            for t in range(self.num_timesteps):
                if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
                    t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
                    t = t.to(self.device).long()
                    noise = torch.randn_like(z_start)
                    z_noisy = self.q_sample(x_start=z_start, t=t, noise=noise)
                    diffusion_row.append(self.decode_first_stage(z_noisy))
            diffusion_row = torch.stack(diffusion_row)  # n_log_step, n_row, C, H, W
            diffusion_grid = rearrange(diffusion_row, 'n b c h w -> b n c h w')
            diffusion_grid = rearrange(diffusion_grid, 'b n c h w -> (b n) c h w')
            diffusion_grid = make_grid(diffusion_grid, nrow=diffusion_row.shape[0])
            log["diffusion_row"] = diffusion_grid
        if sample:
            # get denoise row
            with ema_scope("Sampling"):
                samples, z_denoise_row = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
                                                         ddim_steps=ddim_steps, eta=ddim_eta)
                # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True)
            x_samples = self.decode_first_stage(samples)
            log["samples"] = x_samples
            if plot_denoise_rows:
                denoise_grid = self._get_denoise_row_from_list(z_denoise_row)
                log["denoise_row"] = denoise_grid
            if quantize_denoised and not isinstance(self.first_stage_model, AutoencoderKL) and not isinstance(
                    self.first_stage_model, IdentityFirstStage):
                # also display when quantizing x0 while sampling
                with ema_scope("Plotting Quantized Denoised"):
                    samples, z_denoise_row = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
                                                             ddim_steps=ddim_steps, eta=ddim_eta,
                                                             quantize_denoised=True)
                    # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True,
                    #                                      quantize_denoised=True)
                x_samples = self.decode_first_stage(samples.to(self.device))
                log["samples_x0_quantized"] = x_samples
        if unconditional_guidance_scale > 1.0:
            # classifier-free guidance samples
            uc = self.get_unconditional_conditioning(N, unconditional_guidance_label)
            if self.model.conditioning_key == "crossattn-adm":
                uc = {"c_crossattn": [uc], "c_adm": c["c_adm"]}
            with ema_scope("Sampling with classifier-free guidance"):
                samples_cfg, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
                                                 ddim_steps=ddim_steps, eta=ddim_eta,
                                                 unconditional_guidance_scale=unconditional_guidance_scale,
                                                 unconditional_conditioning=uc,
                                                 )
                x_samples_cfg = self.decode_first_stage(samples_cfg)
                log[f"samples_cfg_scale_{unconditional_guidance_scale:.2f}"] = x_samples_cfg
        if inpaint:
            # make a simple center square
            b, h, w = z.shape[0], z.shape[2], z.shape[3]
            mask = torch.ones(N, h, w).to(self.device)
            # zeros will be filled in
            mask[:, h // 4:3 * h // 4, w // 4:3 * w // 4] = 0.
            mask = mask[:, None, ...]
            with ema_scope("Plotting Inpaint"):
                samples, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim, eta=ddim_eta,
                                             ddim_steps=ddim_steps, x0=z[:N], mask=mask)
            x_samples = self.decode_first_stage(samples.to(self.device))
            log["samples_inpainting"] = x_samples
            log["mask"] = mask
            # outpaint
            mask = 1. - mask
            with ema_scope("Plotting Outpaint"):
                samples, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim, eta=ddim_eta,
                                             ddim_steps=ddim_steps, x0=z[:N], mask=mask)
            x_samples = self.decode_first_stage(samples.to(self.device))
            log["samples_outpainting"] = x_samples
        if plot_progressive_rows:
            with ema_scope("Plotting Progressives"):
                img, progressives = self.progressive_denoising(c,
                                                               shape=(self.channels, self.image_size, self.image_size),
                                                               batch_size=N)
            prog_row = self._get_denoise_row_from_list(progressives, desc="Progressive Generation")
            log["progressive_row"] = prog_row
        if return_keys:
            # keep only requested keys (when any of them exist in the log)
            if np.intersect1d(list(log.keys()), return_keys).shape[0] == 0:
                return log
            else:
                return {key: log[key] for key in return_keys}
        return log
def configure_optimizers(self):
lr = self.learning_rate
params = list(self.model.parameters())
if self.cond_stage_trainable:
print(f"{self.__class__.__name__}: Also optimizing conditioner params!")
params = params + list(self.cond_stage_model.parameters())
if self.learn_logvar:
print('Diffusion model optimizing logvar')
params.append(self.logvar)
opt = torch.optim.AdamW(params, lr=lr)
if self.use_scheduler:
assert 'target' in self.scheduler_config
scheduler = instantiate_from_config(self.scheduler_config)
print("Setting up LambdaLR scheduler...")
scheduler = [
{
'scheduler': LambdaLR(opt, lr_lambda=scheduler.schedule),
'interval': 'step',
'frequency': 1
}]
return [opt], scheduler
return opt
@torch.no_grad()
def to_rgb(self, x):
x = x.float()
if not hasattr(self, "colorize"):
self.colorize = torch.randn(3, x.shape[1], 1, 1).to(x)
x = nn.functional.conv2d(x, weight=self.colorize)
x = 2. * (x - x.min()) / (x.max() - x.min()) - 1.
return x
# class DiffusionWrapper(pl.LightningModule):
class DiffusionWrapper(torch.nn.Module):
    """Thin wrapper that routes conditioning tensors into the diffusion UNet.

    `conditioning_key` selects how c_concat / c_crossattn / c_adm are fed to
    the underlying model: channel concatenation, cross-attention context,
    class embedding `y`, or combinations thereof. `control` is forwarded
    verbatim in every case.
    """
    def __init__(self, diff_model_config, conditioning_key):
        super().__init__()
        # "sequential_crossattn" is consumed here, not by the UNet config
        self.sequential_cross_attn = diff_model_config.pop("sequential_crossattn", False)
        self.diffusion_model = instantiate_from_config(diff_model_config)
        self.conditioning_key = conditioning_key
        assert self.conditioning_key in [None, 'concat', 'crossattn', 'hybrid', 'adm', 'hybrid-adm', 'crossattn-adm']
    def forward(self, x, t, c_concat: list = None, c_crossattn: list = None, c_adm=None, control=None):
        if self.conditioning_key is None:
            out = self.diffusion_model(x, t, control=control)
        elif self.conditioning_key == 'concat':
            # conditioning concatenated to the input along the channel axis
            xc = torch.cat([x] + c_concat, dim=1)
            out = self.diffusion_model(xc, t, control=control)
        elif self.conditioning_key == 'crossattn':
            if not self.sequential_cross_attn:
                cc = torch.cat(c_crossattn, 1)
            else:
                # sequential mode passes the list of contexts through unchanged
                cc = c_crossattn
            if hasattr(self, "scripted_diffusion_model"):
                # TorchScript changes names of the arguments
                # with argument cc defined as context=cc scripted model will produce
                # an error: RuntimeError: forward() is missing value for argument 'argument_3'.
                out = self.scripted_diffusion_model(x, t, cc, control=control)
            else:
                out = self.diffusion_model(x, t, context=cc, control=control)
        elif self.conditioning_key == 'hybrid':
            # channel-concat AND cross-attention conditioning
            xc = torch.cat([x] + c_concat, dim=1)
            cc = torch.cat(c_crossattn, 1)
            out = self.diffusion_model(xc, t, context=cc, control=control)
        elif self.conditioning_key == 'hybrid-adm':
            assert c_adm is not None
            xc = torch.cat([x] + c_concat, dim=1)
            cc = torch.cat(c_crossattn, 1)
            out = self.diffusion_model(xc, t, context=cc, y=c_adm, control=control)
        elif self.conditioning_key == 'crossattn-adm':
            assert c_adm is not None
            cc = torch.cat(c_crossattn, 1)
            out = self.diffusion_model(x, t, context=cc, y=c_adm, control=control)
        elif self.conditioning_key == 'adm':
            # class/embedding conditioning only, passed as `y`
            cc = c_crossattn[0]
            out = self.diffusion_model(x, t, y=cc, control=control)
        else:
            raise NotImplementedError()
        return out
class LatentUpscaleDiffusion(LatentDiffusion):
    """Latent diffusion conditioned on a (noise-augmented) low-resolution image.

    The low-res image is processed by a frozen `low_scale_model`; its output is
    concatenated as c_concat and the augmentation noise level is fed as c_adm.
    """
    def __init__(self, *args, low_scale_config, low_scale_key="LR", noise_level_key=None, **kwargs):
        super().__init__(*args, **kwargs)
        # assumes that neither the cond_stage nor the low_scale_model contain trainable params
        assert not self.cond_stage_trainable
        self.instantiate_low_stage(low_scale_config)
        self.low_scale_key = low_scale_key
        self.noise_level_key = noise_level_key
    def instantiate_low_stage(self, config):
        # frozen helper model: eval mode pinned and all gradients disabled
        model = instantiate_from_config(config)
        self.low_scale_model = model.eval()
        self.low_scale_model.train = disabled_train
        for param in self.low_scale_model.parameters():
            param.requires_grad = False
    @torch.no_grad()
    def get_input(self, batch, k, cond_key=None, bs=None, log_mode=False):
        """Extend the base get_input with the low-res conditioning branch."""
        if not log_mode:
            z, c = super().get_input(batch, k, force_c_encode=True, bs=bs)
        else:
            z, c, x, xrec, xc = super().get_input(batch, self.first_stage_key, return_first_stage_outputs=True,
                                                  force_c_encode=True, return_original_cond=True, bs=bs)
        x_low = batch[self.low_scale_key][:bs]
        x_low = rearrange(x_low, 'b h w c -> b c h w')
        x_low = x_low.to(memory_format=torch.contiguous_format).float()
        # zx: (noised) low-res conditioning; noise_level: its augmentation level
        zx, noise_level = self.low_scale_model(x_low)
        if self.noise_level_key is not None:
            # get noise level from batch instead, e.g. when extracting a custom noise level for bsr
            raise NotImplementedError('TODO')
        all_conds = {"c_concat": [zx], "c_crossattn": [c], "c_adm": noise_level}
        if log_mode:
            # TODO: maybe disable if too expensive
            x_low_rec = self.low_scale_model.decode(zx)
            return z, all_conds, x, xrec, xc, x_low, x_low_rec, noise_level
        return z, all_conds
    @torch.no_grad()
    def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=200, ddim_eta=1., return_keys=None,
                   plot_denoise_rows=False, plot_progressive_rows=True, plot_diffusion_rows=True,
                   unconditional_guidance_scale=1., unconditional_guidance_label=None, use_ema_scope=True,
                   **kwargs):
        """Visualization dict like LatentDiffusion.log_images, plus the low-res
        input and its reconstruction at the sampled noise levels."""
        ema_scope = self.ema_scope if use_ema_scope else nullcontext
        use_ddim = ddim_steps is not None
        log = dict()
        z, c, x, xrec, xc, x_low, x_low_rec, noise_level = self.get_input(batch, self.first_stage_key, bs=N,
                                                                          log_mode=True)
        N = min(x.shape[0], N)
        n_row = min(x.shape[0], n_row)
        log["inputs"] = x
        log["reconstruction"] = xrec
        log["x_lr"] = x_low
        log[f"x_lr_rec_@noise_levels{'-'.join(map(lambda x: str(x), list(noise_level.cpu().numpy())))}"] = x_low_rec
        if self.model.conditioning_key is not None:
            # visualize the conditioning in whatever form it comes
            if hasattr(self.cond_stage_model, "decode"):
                xc = self.cond_stage_model.decode(c)
                log["conditioning"] = xc
            elif self.cond_stage_key in ["caption", "txt"]:
                xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[self.cond_stage_key], size=x.shape[2] // 25)
                log["conditioning"] = xc
            elif self.cond_stage_key in ['class_label', 'cls']:
                xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"], size=x.shape[2] // 25)
                log['conditioning'] = xc
            elif isimage(xc):
                log["conditioning"] = xc
            if ismap(xc):
                log["original_conditioning"] = self.to_rgb(xc)
        if plot_diffusion_rows:
            # get diffusion row
            diffusion_row = list()
            z_start = z[:n_row]
            for t in range(self.num_timesteps):
                if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
                    t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
                    t = t.to(self.device).long()
                    noise = torch.randn_like(z_start)
                    z_noisy = self.q_sample(x_start=z_start, t=t, noise=noise)
                    diffusion_row.append(self.decode_first_stage(z_noisy))
            diffusion_row = torch.stack(diffusion_row)  # n_log_step, n_row, C, H, W
            diffusion_grid = rearrange(diffusion_row, 'n b c h w -> b n c h w')
            diffusion_grid = rearrange(diffusion_grid, 'b n c h w -> (b n) c h w')
            diffusion_grid = make_grid(diffusion_grid, nrow=diffusion_row.shape[0])
            log["diffusion_row"] = diffusion_grid
        if sample:
            # get denoise row
            with ema_scope("Sampling"):
                samples, z_denoise_row = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
                                                         ddim_steps=ddim_steps, eta=ddim_eta)
                # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True)
            x_samples = self.decode_first_stage(samples)
            log["samples"] = x_samples
            if plot_denoise_rows:
                denoise_grid = self._get_denoise_row_from_list(z_denoise_row)
                log["denoise_row"] = denoise_grid
        if unconditional_guidance_scale > 1.0:
            uc_tmp = self.get_unconditional_conditioning(N, unconditional_guidance_label)
            # TODO explore better "unconditional" choices for the other keys
            # maybe guide away from empty text label and highest noise level and maximally degraded zx?
            uc = dict()
            for k in c:
                if k == "c_crossattn":
                    assert isinstance(c[k], list) and len(c[k]) == 1
                    uc[k] = [uc_tmp]
                elif k == "c_adm":  # todo: only run with text-based guidance?
                    assert isinstance(c[k], torch.Tensor)
                    #uc[k] = torch.ones_like(c[k]) * self.low_scale_model.max_noise_level
                    uc[k] = c[k]
                elif isinstance(c[k], list):
                    uc[k] = [c[k][i] for i in range(len(c[k]))]
                else:
                    uc[k] = c[k]
            with ema_scope("Sampling with classifier-free guidance"):
                samples_cfg, _ = self.sample_log(cond=c, batch_size=N, ddim=use_ddim,
                                                 ddim_steps=ddim_steps, eta=ddim_eta,
                                                 unconditional_guidance_scale=unconditional_guidance_scale,
                                                 unconditional_conditioning=uc,
                                                 )
                x_samples_cfg = self.decode_first_stage(samples_cfg)
                log[f"samples_cfg_scale_{unconditional_guidance_scale:.2f}"] = x_samples_cfg
        if plot_progressive_rows:
            with ema_scope("Plotting Progressives"):
                img, progressives = self.progressive_denoising(c,
                                                               shape=(self.channels, self.image_size, self.image_size),
                                                               batch_size=N)
            prog_row = self._get_denoise_row_from_list(progressives, desc="Progressive Generation")
            log["progressive_row"] = prog_row
        return log
class LatentFinetuneDiffusion(LatentDiffusion):
"""
    Basis for different fine-tuning variants, such as inpainting or depth2image
To disable finetuning mode, set finetune_keys to None
"""
    def __init__(self,
                 concat_keys: tuple,
                 finetune_keys=("model.diffusion_model.input_blocks.0.0.weight",
                                "model_ema.diffusion_modelinput_blocks00weight"
                                # NOTE(review): EMA key appears to use dot-stripped
                                # parameter names — confirm against the EMA module.
                                ),
                 keep_finetune_dims=4,
                 # if model was trained without concat mode before and we would like to keep these channels
                 c_concat_log_start=None,  # to log reconstruction of c_concat codes
                 c_concat_log_end=None,
                 *args, **kwargs
                 ):
        """Set up finetuning from a checkpoint with extra c_concat input channels.

        :param concat_keys: batch keys whose contents become c_concat inputs.
        :param finetune_keys: state-dict keys whose input channels are widened;
            set to None to disable finetuning mode.
        :param keep_finetune_dims: how many original input channels to keep.
        """
        ckpt_path = kwargs.pop("ckpt_path", None)
        ignore_keys = kwargs.pop("ignore_keys", list())
        super().__init__(*args, **kwargs)
        self.finetune_keys = finetune_keys
        self.concat_keys = concat_keys
        self.keep_dims = keep_finetune_dims
        self.c_concat_log_start = c_concat_log_start
        self.c_concat_log_end = c_concat_log_end
        if exists(self.finetune_keys): assert exists(ckpt_path), 'can only finetune from a given checkpoint'
        if exists(ckpt_path):
            self.init_from_ckpt(ckpt_path, ignore_keys)
    def init_from_ckpt(self, path, ignore_keys=list(), only_model=False):
        """Load a checkpoint, dropping `ignore_keys` and zero-extending any
        finetune-target weights: the first `keep_dims` input channels are
        copied from the checkpoint, newly added channels start at zero.

        NOTE(review): the mutable default `ignore_keys=list()` is shared
        across calls — harmless while it is never mutated, but worth fixing.
        """
        sd = torch.load(path, map_location="cpu")
        if "state_dict" in list(sd.keys()):
            sd = sd["state_dict"]
        keys = list(sd.keys())
        for k in keys:
            for ik in ignore_keys:
                if k.startswith(ik):
                    print("Deleting key {} from state_dict.".format(k))
                    del sd[k]
            # make it explicit, finetune by including extra input channels
            if exists(self.finetune_keys) and k in self.finetune_keys:
                new_entry = None
                for name, param in self.named_parameters():
                    if name in self.finetune_keys:
                        print(
                            f"modifying key '{name}' and keeping its original {self.keep_dims} (channels) dimensions only")
                        new_entry = torch.zeros_like(param)  # zero init
                assert exists(new_entry), 'did not find matching parameter to modify'
                new_entry[:, :self.keep_dims, ...] = sd[k]
                sd[k] = new_entry
        missing, unexpected = self.load_state_dict(sd, strict=False) if not only_model else self.model.load_state_dict(
            sd, strict=False)
        print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys")
        if len(missing) > 0:
            print(f"Missing Keys: {missing}")
        if len(unexpected) > 0:
            print(f"Unexpected Keys: {unexpected}")
    @torch.no_grad()
    def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=200, ddim_eta=1., return_keys=None,
                   quantize_denoised=True, inpaint=True, plot_denoise_rows=False, plot_progressive_rows=True,
                   plot_diffusion_rows=True, unconditional_guidance_scale=1., unconditional_guidance_label=None,
                   use_ema_scope=True,
                   **kwargs):
        """Build a dict of images for logging: inputs, reconstructions,
        conditioning visualizations, a forward-diffusion row, samples and
        (optionally) classifier-free-guidance samples.

        NOTE(review): return_keys, quantize_denoised, inpaint and
        plot_progressive_rows are accepted but unused in this body —
        presumably kept for interface compatibility; confirm against callers.
        """
        # Sample under EMA weights unless explicitly disabled.
        ema_scope = self.ema_scope if use_ema_scope else nullcontext
        use_ddim = ddim_steps is not None

        log = dict()
        z, c, x, xrec, xc = self.get_input(batch, self.first_stage_key, bs=N, return_first_stage_outputs=True)
        # Conditioning arrives as {"c_concat": [...], "c_crossattn": [...]};
        # unpack the single tensor from each one-element list.
        c_cat, c = c["c_concat"][0], c["c_crossattn"][0]
        N = min(x.shape[0], N)
        n_row = min(x.shape[0], n_row)
        log["inputs"] = x
        log["reconstruction"] = xrec
        if self.model.conditioning_key is not None:
            if hasattr(self.cond_stage_model, "decode"):
                # Conditioning model can decode its codes back to image space.
                xc = self.cond_stage_model.decode(c)
                log["conditioning"] = xc
            elif self.cond_stage_key in ["caption", "txt"]:
                # Render the text prompt itself as an image.
                xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[self.cond_stage_key], size=x.shape[2] // 25)
                log["conditioning"] = xc
            elif self.cond_stage_key in ['class_label', 'cls']:
                # Render the human-readable class label as an image.
                xc = log_txt_as_img((x.shape[2], x.shape[3]), batch["human_label"], size=x.shape[2] // 25)
                log['conditioning'] = xc
            elif isimage(xc):
                log["conditioning"] = xc
            if ismap(xc):
                log["original_conditioning"] = self.to_rgb(xc)

        if not (self.c_concat_log_start is None and self.c_concat_log_end is None):
            # Decode a channel slice of the concat conditioning for inspection.
            log["c_concat_decoded"] = self.decode_first_stage(c_cat[:, self.c_concat_log_start:self.c_concat_log_end])

        if plot_diffusion_rows:
            # get diffusion row: forward-noised latents at log_every_t intervals
            diffusion_row = list()
            z_start = z[:n_row]
            for t in range(self.num_timesteps):
                if t % self.log_every_t == 0 or t == self.num_timesteps - 1:
                    t = repeat(torch.tensor([t]), '1 -> b', b=n_row)
                    t = t.to(self.device).long()
                    noise = torch.randn_like(z_start)
                    z_noisy = self.q_sample(x_start=z_start, t=t, noise=noise)
                    diffusion_row.append(self.decode_first_stage(z_noisy))

            diffusion_row = torch.stack(diffusion_row)  # n_log_step, n_row, C, H, W
            diffusion_grid = rearrange(diffusion_row, 'n b c h w -> b n c h w')
            diffusion_grid = rearrange(diffusion_grid, 'b n c h w -> (b n) c h w')
            diffusion_grid = make_grid(diffusion_grid, nrow=diffusion_row.shape[0])
            log["diffusion_row"] = diffusion_grid

        if sample:
            # get denoise row
            with ema_scope("Sampling"):
                samples, z_denoise_row = self.sample_log(cond={"c_concat": [c_cat], "c_crossattn": [c]},
                                                         batch_size=N, ddim=use_ddim,
                                                         ddim_steps=ddim_steps, eta=ddim_eta)
                # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True)
            x_samples = self.decode_first_stage(samples)
            log["samples"] = x_samples
            if plot_denoise_rows:
                denoise_grid = self._get_denoise_row_from_list(z_denoise_row)
                log["denoise_row"] = denoise_grid

        if unconditional_guidance_scale > 1.0:
            # Classifier-free guidance: swap in an unconditional cross-attn
            # embedding; the concat conditioning is kept as-is.
            uc_cross = self.get_unconditional_conditioning(N, unconditional_guidance_label)
            uc_cat = c_cat
            uc_full = {"c_concat": [uc_cat], "c_crossattn": [uc_cross]}
            with ema_scope("Sampling with classifier-free guidance"):
                samples_cfg, _ = self.sample_log(cond={"c_concat": [c_cat], "c_crossattn": [c]},
                                                 batch_size=N, ddim=use_ddim,
                                                 ddim_steps=ddim_steps, eta=ddim_eta,
                                                 unconditional_guidance_scale=unconditional_guidance_scale,
                                                 unconditional_conditioning=uc_full,
                                                 )
            x_samples_cfg = self.decode_first_stage(samples_cfg)
            log[f"samples_cfg_scale_{unconditional_guidance_scale:.2f}"] = x_samples_cfg

        return log
class LatentInpaintDiffusion(LatentFinetuneDiffusion):
    """
    can either run as pure inpainting model (only concat mode) or with mixed conditionings,
    e.g. mask as concat and text via cross-attn.
    To disable finetuning mode, set finetune_keys to None
    """

    def __init__(self,
                 concat_keys=("mask", "masked_image"),
                 masked_image_key="masked_image",
                 *args, **kwargs
                 ):
        # concat_keys: batch entries concatenated channel-wise to the latent.
        # masked_image_key: the key among concat_keys holding an image that is
        # VAE-encoded (the other keys are treated as masks and only resized).
        super().__init__(concat_keys, *args, **kwargs)
        self.masked_image_key = masked_image_key
        assert self.masked_image_key in concat_keys

    @torch.no_grad()
    def get_input(self, batch, k, cond_key=None, bs=None, return_first_stage_outputs=False):
        # note: restricted to non-trainable encoders currently
        assert not self.cond_stage_trainable, 'trainable cond stages not yet supported for inpainting'
        z, c, x, xrec, xc = super().get_input(batch, self.first_stage_key, return_first_stage_outputs=True,
                                              force_c_encode=True, return_original_cond=True, bs=bs)

        assert exists(self.concat_keys)
        c_cat = list()
        for ck in self.concat_keys:
            # Batch tensors arrive channels-last; convert to NCHW floats.
            cc = rearrange(batch[ck], 'b h w c -> b c h w').to(memory_format=torch.contiguous_format).float()
            if bs is not None:
                cc = cc[:bs]
                # NOTE(review): the device move only happens when bs is given —
                # confirm inputs are already on self.device otherwise.
                cc = cc.to(self.device)
            bchw = z.shape
            if ck != self.masked_image_key:
                # Mask: just resize spatially to the latent resolution.
                cc = torch.nn.functional.interpolate(cc, size=bchw[-2:])
            else:
                # Masked image: encode through the first stage to latent space.
                cc = self.get_first_stage_encoding(self.encode_first_stage(cc))
            c_cat.append(cc)
        c_cat = torch.cat(c_cat, dim=1)
        all_conds = {"c_concat": [c_cat], "c_crossattn": [c]}
        if return_first_stage_outputs:
            return z, all_conds, x, xrec, xc
        return z, all_conds

    @torch.no_grad()
    def log_images(self, *args, **kwargs):
        # args[0] is the batch; additionally log the raw masked image in NCHW.
        log = super(LatentInpaintDiffusion, self).log_images(*args, **kwargs)
        log["masked_image"] = rearrange(args[0]["masked_image"],
                                        'b h w c -> b c h w').to(memory_format=torch.contiguous_format).float()
        return log
class LatentDepth2ImageDiffusion(LatentFinetuneDiffusion):
    """
    condition on monocular depth estimation
    """

    def __init__(self, depth_stage_config, concat_keys=("midas_in",), *args, **kwargs):
        super().__init__(concat_keys=concat_keys, *args, **kwargs)
        # Monocular depth estimator used to build the concat conditioning.
        self.depth_model = instantiate_from_config(depth_stage_config)
        self.depth_stage_key = concat_keys[0]

    @torch.no_grad()
    def get_input(self, batch, k, cond_key=None, bs=None, return_first_stage_outputs=False):
        """Return latents plus conditioning where c_concat is the predicted
        depth map, resized to the latent resolution and normalized to [-1, 1].
        """
        # note: restricted to non-trainable encoders currently
        assert not self.cond_stage_trainable, 'trainable cond stages not yet supported for depth2img'
        z, c, x, xrec, xc = super().get_input(batch, self.first_stage_key, return_first_stage_outputs=True,
                                              force_c_encode=True, return_original_cond=True, bs=bs)

        assert exists(self.concat_keys)
        assert len(self.concat_keys) == 1
        c_cat = list()
        for ck in self.concat_keys:
            cc = batch[ck]
            if bs is not None:
                cc = cc[:bs]
                cc = cc.to(self.device)
            cc = self.depth_model(cc)
            cc = torch.nn.functional.interpolate(
                cc,
                size=z.shape[2:],
                mode="bicubic",
                align_corners=False,
            )
            # Per-sample min/max normalization to [-1, 1]; the small epsilon
            # guards against division by zero for constant depth maps.
            depth_min, depth_max = torch.amin(cc, dim=[1, 2, 3], keepdim=True), torch.amax(cc, dim=[1, 2, 3],
                                                                                           keepdim=True)
            cc = 2. * (cc - depth_min) / (depth_max - depth_min + 0.001) - 1.
            c_cat.append(cc)
        c_cat = torch.cat(c_cat, dim=1)
        all_conds = {"c_concat": [c_cat], "c_crossattn": [c]}
        if return_first_stage_outputs:
            return z, all_conds, x, xrec, xc
        return z, all_conds

    @torch.no_grad()
    def log_images(self, *args, **kwargs):
        """Log images plus the normalized depth map used as conditioning."""
        log = super().log_images(*args, **kwargs)
        depth = self.depth_model(args[0][self.depth_stage_key])
        depth_min, depth_max = torch.amin(depth, dim=[1, 2, 3], keepdim=True), \
            torch.amax(depth, dim=[1, 2, 3], keepdim=True)
        # BUGFIX: use the same epsilon as get_input so a constant depth map
        # does not divide by zero and produce NaNs in the logged image.
        log["depth"] = 2. * (depth - depth_min) / (depth_max - depth_min + 0.001) - 1.
        return log
class LatentUpscaleFinetuneDiffusion(LatentFinetuneDiffusion):
    """
    condition on low-res image (and optionally on some spatial noise augmentation)
    """

    def __init__(self, concat_keys=("lr",), reshuffle_patch_size=None,
                 low_scale_config=None, low_scale_key=None, *args, **kwargs):
        # reshuffle_patch_size: if set, fold spatial patches of the LR image
        # into channels before conditioning (see get_input).
        # low_scale_config/low_scale_key: optional noise-augmentation model
        # applied to the low-res input; it also yields a noise level (c_adm).
        super().__init__(concat_keys=concat_keys, *args, **kwargs)
        self.reshuffle_patch_size = reshuffle_patch_size
        self.low_scale_model = None
        if low_scale_config is not None:
            print("Initializing a low-scale model")
            assert exists(low_scale_key)
            self.instantiate_low_stage(low_scale_config)
            self.low_scale_key = low_scale_key

    def instantiate_low_stage(self, config):
        # Build the low-scale model frozen in eval mode: train() is disabled
        # and gradients are turned off for all its parameters.
        model = instantiate_from_config(config)
        self.low_scale_model = model.eval()
        self.low_scale_model.train = disabled_train
        for param in self.low_scale_model.parameters():
            param.requires_grad = False

    @torch.no_grad()
    def get_input(self, batch, k, cond_key=None, bs=None, return_first_stage_outputs=False):
        # note: restricted to non-trainable encoders currently
        assert not self.cond_stage_trainable, 'trainable cond stages not yet supported for upscaling-ft'
        z, c, x, xrec, xc = super().get_input(batch, self.first_stage_key, return_first_stage_outputs=True,
                                              force_c_encode=True, return_original_cond=True, bs=bs)

        assert exists(self.concat_keys)
        assert len(self.concat_keys) == 1
        # optionally make spatial noise_level here
        c_cat = list()
        noise_level = None
        for ck in self.concat_keys:
            cc = batch[ck]
            # Batch tensors arrive channels-last; convert to NCHW.
            cc = rearrange(cc, 'b h w c -> b c h w')
            if exists(self.reshuffle_patch_size):
                assert isinstance(self.reshuffle_patch_size, int)
                # Fold p1 x p2 spatial patches into the channel dimension.
                cc = rearrange(cc, 'b c (p1 h) (p2 w) -> b (p1 p2 c) h w',
                               p1=self.reshuffle_patch_size, p2=self.reshuffle_patch_size)
            if bs is not None:
                cc = cc[:bs]
                cc = cc.to(self.device)
            if exists(self.low_scale_model) and ck == self.low_scale_key:
                # Noise-augment the LR input; also returns the noise level.
                cc, noise_level = self.low_scale_model(cc)
            c_cat.append(cc)
        c_cat = torch.cat(c_cat, dim=1)
        if exists(noise_level):
            # Expose the augmentation noise level as adm conditioning.
            all_conds = {"c_concat": [c_cat], "c_crossattn": [c], "c_adm": noise_level}
        else:
            all_conds = {"c_concat": [c_cat], "c_crossattn": [c]}
        if return_first_stage_outputs:
            return z, all_conds, x, xrec, xc
        return z, all_conds

    @torch.no_grad()
    def log_images(self, *args, **kwargs):
        # args[0] is the batch; also log the raw low-res input in NCHW layout.
        log = super().log_images(*args, **kwargs)
        log["lr"] = rearrange(args[0]["lr"], 'b h w c -> b c h w')
        return log
|
PypiClean
|
/gamification-engine-0.4.0.tar.gz/gamification-engine-0.4.0/gengine/app/jsscripts/node_modules/autoprefixer/lib/hacks/align-content.js
|
'use strict';
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
// Babel helper: classic prototypal inheritance wiring — validates the
// superclass, chains subClass.prototype to superClass.prototype (installing a
// non-enumerable `constructor`), and links the constructors' [[Prototype]]
// (via Object.setPrototypeOf or __proto__) so statics are inherited too.
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var flexSpec = require('./flex-spec');
var Declaration = require('../declaration');
// Autoprefixer "hack" mapping the final-spec `align-content` property onto
// the 2012 flexbox spec's `flex-line-pack` (transpiled ES5 class extending
// the project-local Declaration base).
var AlignContent = function (_Declaration) {
    _inherits(AlignContent, _Declaration);

    function AlignContent() {
        _classCallCheck(this, AlignContent);

        return _possibleConstructorReturn(this, _Declaration.apply(this, arguments));
    }

    /**
     * Change property name for 2012 spec
     */
    AlignContent.prototype.prefixed = function prefixed(prop, prefix) {
        var spec = void 0;

        // flexSpec returns [spec version, normalized prefix] for a prefix.
        var _flexSpec = flexSpec(prefix);

        spec = _flexSpec[0];
        prefix = _flexSpec[1];
        if (spec === 2012) {
            return prefix + 'flex-line-pack';
        } else {
            return _Declaration.prototype.prefixed.call(this, prop, prefix);
        }
    };

    /**
     * Return property name by final spec
     */
    AlignContent.prototype.normalize = function normalize() {
        return 'align-content';
    };

    /**
     * Change value for 2012 spec and ignore prefix for 2009
     */
    AlignContent.prototype.set = function set(decl, prefix) {
        var spec = flexSpec(prefix)[0];
        if (spec === 2012) {
            // Translate final-spec keywords to their 2012 equivalents.
            decl.value = AlignContent.oldValues[decl.value] || decl.value;
            return _Declaration.prototype.set.call(this, decl, prefix);
        } else if (spec === 'final') {
            return _Declaration.prototype.set.call(this, decl, prefix);
        }
        // 2009 spec has no equivalent: emit nothing for this prefix.
        return undefined;
    };

    return AlignContent;
}(Declaration);
// Static metadata consumed by autoprefixer's dispatcher: the property names
// this hack handles (defineProperty mirrors Babel's static class-property
// output).
Object.defineProperty(AlignContent, 'names', {
    enumerable: true,
    writable: true,
    value: ['align-content', 'flex-line-pack']
});
// Mapping from final-spec `align-content` values to their 2012-spec
// `flex-line-pack` equivalents, used in set().
Object.defineProperty(AlignContent, 'oldValues', {
    enumerable: true,
    writable: true,
    value: {
        'flex-end': 'end',
        'flex-start': 'start',
        'space-between': 'justify',
        'space-around': 'distribute'
    }
});

module.exports = AlignContent;
|
PypiClean
|
/django-el-pagination-4.0.0.tar.gz/django-el-pagination-4.0.0/doc/javascript.rst
|
JavaScript reference
====================
For each type of pagination it is possible to enable Ajax so that the requested
page is loaded using an asynchronous request to the server. This is especially
important for :doc:`twitter_pagination` and
:ref:`endless pagination on scroll<javascript-pagination-on-scroll>`, but
:doc:`digg_pagination` can also take advantage of this technique.
Activating Ajax support
~~~~~~~~~~~~~~~~~~~~~~~
Ajax support is activated linking jQuery and the ``el-pagination.js`` file
included in this app. It is then possible to use the *$.endlessPaginate()*
jQuery plugin to enable Ajax pagination, e.g.:
.. code-block:: html+django
<h2>Entries:</h2>
<div class="endless_page_template">
{% include page_template %}
</div>
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>$.endlessPaginate();</script>
{% endblock %}
This example assumes that you
:ref:`separated the fragment<twitter-split-template>` containing the single
page (*page_template*) from the main template (the code snippet above). More on
this in :doc:`twitter_pagination` and :doc:`digg_pagination`.
The *$.endlessPaginate()* call activates Ajax for each pagination present in
the page.
.. _javascript-pagination-on-scroll:
Pagination on scroll
~~~~~~~~~~~~~~~~~~~~
If you want new items to load when the user scrolls down the browser page,
you can use the **pagination on scroll** feature: just set the
*paginateOnScroll* option of *$.endlessPaginate()* to *true*, e.g.:
.. code-block:: html+django
<h2>Entries:</h2>
<div class="endless_page_template">
{% include page_template %}
</div>
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>$.endlessPaginate({paginateOnScroll: true});</script>
{% endblock %}
That's all. See the :doc:`templatetags_reference` page to improve usage of
the included templatetags.
It is possible to set the **bottom margin** used for pagination on scroll
(default is 1 pixel). For example, if you want the pagination on scroll
to be activated when 200 pixels remain to the end of the page:
.. code-block:: html+django
<h2>Entries:</h2>
<div class="endless_page_template">
{% include page_template %}
</div>
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>
$.endlessPaginate({
paginateOnScroll: true,
paginateOnScrollMargin: 200
});
</script>
{% endblock %}
Attaching callbacks
~~~~~~~~~~~~~~~~~~~
It is possible to customize the behavior of JavaScript pagination by attaching
callbacks to *$.endlessPaginate()*, called when the following events are fired:
- *onClick*: the user clicks on a page link;
- *onCompleted*: the new page is fully loaded and inserted in the DOM.
The context of both callbacks is the clicked link fragment: in other words,
inside the callbacks, *this* will be the HTML fragment representing the clicked
link, e.g.:
.. code-block:: html+django
<h2>Entries:</h2>
<div class="endless_page_template">
{% include page_template %}
</div>
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>
$.endlessPaginate({
onClick: function() {
console.log('Label:', $(this).text());
}
});
</script>
{% endblock %}
Both callbacks also receive a *context* argument containing information about
the requested page:
- *context.url*: the requested URL;
- *context.key*: the querystring key used to retrieve the requested contents.
If the *onClick* callback returns *false*, the pagination process is stopped,
the Ajax request is not performed and the *onCompleted* callback never called.
The *onCompleted* callbacks also receives a second argument: the data returned
by the server. Basically this is the HTML fragment representing the new
requested page.
To wrap it up, here is an example showing the callbacks' signatures:
.. code-block:: html+django
<h2>Entries:</h2>
<div class="endless_page_template">
{% include page_template %}
</div>
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>
$.endlessPaginate({
onClick: function(context) {
console.log('Label:', $(this).text());
console.log('URL:', context.url);
console.log('Querystring key:', context.key);
if (forbidden) { // to be defined...
return false;
}
},
onCompleted: function(context, fragment) {
console.log('Label:', $(this).text());
console.log('URL:', context.url);
console.log('Querystring key:', context.key);
console.log('Fragment:', fragment);
}
});
</script>
{% endblock %}
Manually selecting what to bind
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As seen above, *$.endlessPaginate()* enables Ajax support for each pagination
in the page. But assuming you are using :doc:`multiple_pagination`, e.g.:
.. code-block:: html+django
<h2>Entries:</h2>
<div id="entries" class="endless_page_template">
{% include "myapp/entries_page.html" %}
</div>
<h2>Other entries:</h2>
<div id="other-entries" class="endless_page_template">
{% include "myapp/other_entries_page.html" %}
</div>
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>$.endlessPaginate();</script>
{% endblock %}
What if you need Ajax pagination only for *entries* and not for
*other entries*? You can do this in a straightforward way using jQuery
selectors, e.g.:
.. code-block:: html+django
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>$('#entries').endlessPaginate();</script>
{% endblock %}
The call to *$('#entries').endlessPaginate()* applies Ajax pagination starting
from the DOM node with id *entries* and to all sub-nodes. This means that
*other entries* are left intact. Of course you can use any selector supported
by jQuery.
At this point, you might have already guessed that *$.endlessPaginate()*
is just an alias for *$('body').endlessPaginate()*.
Customize each pagination
~~~~~~~~~~~~~~~~~~~~~~~~~
You can also call *$.endlessPaginate()* multiple times if you want to customize
the behavior of each pagination. E.g. if you need to register a callback for
*entries* but not for *other entries*:
.. code-block:: html+django
<h2>Entries:</h2>
<div id="entries" class="endless_page_template">
{% include "myapp/entries_page.html" %}
</div>
<h2>Other entries:</h2>
<div id="other-entries" class="endless_page_template">
{% include "myapp/other_entries_page.html" %}
</div>
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>
$('#entries').endlessPaginate({
onCompleted: function(data) {
console.log('New entries loaded.');
}
});
$('#other-entries').endlessPaginate();
</script>
{% endblock %}
.. _javascript-selectors:
Selectors
~~~~~~~~~
Each time *$.endlessPaginate()* is used, several JavaScript selectors are used
to select DOM nodes. Here is a list of them all:
- containerSelector: '.endless_container'
(Twitter-style pagination container selector);
- loadingSelector: '.endless_loading' -
(Twitter-style pagination loading selector);
- moreSelector: 'a.endless_more' -
(Twitter-style pagination link selector);
- contentSelector: null -
(Twitter-style pagination content wrapper);
- pageSelector: '.endless_page_template'
(Digg-style pagination page template selector);
- pagesSelector: 'a.endless_page_link'
(Digg-style pagination link selector).
An example can better explain the meaning of the selectors above. Assume you
have a Digg-style pagination like the following:
.. code-block:: html+django
<h2>Entries:</h2>
<div id="entries" class="endless_page_template">
{% include "myapp/entries_page.html" %}
</div>
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>
$('#entries').endlessPaginate();
</script>
{% endblock %}
Here the ``#entries`` node is selected and Digg-style pagination is applied.
Digg-style needs to know which DOM node will be updated with new contents,
and in this case it's the same node we selected, because we added the
*endless_page_template* class to that node, and *.endless_page_template*
is the selector used by default. However, the following example is equivalent
and does not involve adding another class to the container:
.. code-block:: html+django
<h2>Entries:</h2>
<div id="entries">
{% include "myapp/entries_page.html" %}
</div>
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>
$('#entries').endlessPaginate({
pageSelector: '#entries'
});
</script>
{% endblock %}
.. _javascript-chunks:
On scroll pagination using chunks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Sometimes, when using on scroll pagination, you may want to still display
the *show more* link after each *N* pages. In Django Endless Pagination this is
called *chunk size*. For instance, a chunk size of 5 means that a *show more*
link is displayed after page 5 is loaded, then after page 10, then after page
15 and so on. Activating this functionality is straightforward, just use the
*paginateOnScrollChunkSize* option:
.. code-block:: html+django
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>
$.endlessPaginate({
paginateOnScroll: true,
paginateOnScrollChunkSize: 5
});
</script>
{% endblock %}
Each time a chunk size is complete, the class ``endless_chunk_complete`` is added to the *show more* link,
so you still have a way to distinguish between the implicit
click done by the scroll event and a real click on the button.
.. _javascript-migrate:
Migrate from version 1.1 to 2.1
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Django Endless Pagination v2.0 introduces changes in how Ajax pagination
is handled by JavaScript. These changes are discussed in this document and in
the :doc:`changelog`.
The JavaScript code now lives in a file named ``el-pagination.js``.
The two JavaScript files ``el-pagination-endless.js`` and ``el-pagination_on_scroll.js`` were removed.
However, please consider migrating: the old JavaScript files were removed, are
no longer maintained, and don't provide the new JavaScript features.
Instructions on how to migrate from the old version to the new one follow.
Basic migration
---------------
Before:
.. code-block:: html+django
<h2>Entries:</h2>
{% include page_template %}
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination-endless.js"></script>
{% endblock %}
Now:
.. code-block:: html+django
<h2>Entries:</h2>
{% include page_template %}
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>$.endlessPaginate();</script>
{% endblock %}
Pagination on scroll
--------------------
Before:
.. code-block:: html+django
<h2>Entries:</h2>
{% include page_template %}
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination-endless.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination_on_scroll.js"></script>
{% endblock %}
Now:
.. code-block:: html+django
<h2>Entries:</h2>
{% include page_template %}
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>
$.endlessPaginate({paginateOnScroll: true});
</script>
{% endblock %}
Pagination on scroll with customized bottom margin
--------------------------------------------------
Before:
.. code-block:: html+django
<h2>Entries:</h2>
{% include page_template %}
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination-endless.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination_on_scroll.js"></script>
<script>
var endless_on_scroll_margin = 200;
</script>
{% endblock %}
Now:
.. code-block:: html+django
<h2>Entries:</h2>
{% include page_template %}
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
<script>
$.endlessPaginate({
paginateOnScroll: true,
paginateOnScrollMargin: 200
});
</script>
{% endblock %}
Avoid enabling Ajax on one or more paginations
----------------------------------------------
Before:
.. code-block:: html+django
<h2>Other entries:</h2>
<div class="endless_page_template endless_page_skip">
{% include "myapp/other_entries_page.html" %}
</div>
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination-endless.js"></script>
{% endblock %}
Now:
.. code-block:: html+django
<h2>Other entries:</h2>
<div class="endless_page_template endless_page_skip">
{% include "myapp/other_entries_page.html" %}
</div>
{% block js %}
{{ block.super }}
<script src="http://code.jquery.com/jquery-latest.js"></script>
<script src="{{ STATIC_URL }}el-pagination/js/el-pagination.js"></script>
    <script>$(':not(.endless_page_skip)').endlessPaginate();</script>
{% endblock %}
In this last example, activating Ajax just where you want might be preferred
over excluding nodes.
|
PypiClean
|
/build-service-osc-0.16.2.tar.gz/build-service-osc-0.16.2/README
|
osc -- opensuse-commander with svn like handling
Patches can be submitted via
* mail to [email protected]
* Bugzilla: https://bugzilla.novell.com/enter_bug.cgi?product=openSUSE.org&component=BuildService
* or the official Git repository on Github:
https://github.com/openSUSE/osc
INSTALLATION:
RPM packages are here (rpm-md repository):
http://download.opensuse.org/repositories/openSUSE:/Tools/
To install from git, do
python setup.py build
python setup.py install
# create a symlink 'osc' in your path pointing to osc.py.
ln -s osc-wrapper.py /usr/bin/osc
Alternatively, you can directly use osc-wrapper.py from the source dir
(which is easier if you develop on osc).
The program needs the cElementTree python module installed. On SUSE, the
respective package is called python-elementtree (before 10.2: python-xml).
For local building, you will need python-urlgrabber in addition. Those are
standard package on SUSE Linux since a while. If your version is too old, you
can find python-elementtree and python-urlgrabber here:
http://download.opensuse.org/repositories/devel:/languages:/python/
CONFIGURATION:
When you use it for the first time, it will ask you for your username and
password, and store it in ~/.oscrc.
CONFIGURATION MIGRATION (only affects versions >= 0.114):
Version 0.114 got some cleanups for the configfile handling and therefore some
options are now deprecated, namely:
* apisrv
* scheme
One new option was added:
* apiurl = <protocol>://<somehost> # use this as the default apiurl. If this
option isn't specified the default (https://api.opensuse.org) is used.
So far osc still has some backward compatibility for these options but it might
get removed in the future that's why it issues a deprecation warning in case
one of those options is still in use.
The new configuration scheme looks like the following:
# entry for an apiurl
[<protocol>://<apiurl>]
user = <username>
password = <password>
...
'''Before starting the migration please save your ~/.oscrc file!'''
If the migration doesn't work for whatever reason feel free to send me an email
or ask on the opensuse-buildservice mailinglist or in the #opensuse-buildservice
irc channel.
=== Migration case I (apisrv only) ===
The apisrv option is used to specify the default apihost. If apisrv isn't
specified at all the default ("api.opensuse.org") is used.
The current [general] section looks like this:
[general]
...
apisrv = <somehost>
# or
apisrv = <protocol>://<somehost>
apisrv got superseded by the new apiurl option which looks like this:
[general]
...
apiurl = <protocol>://<somehost>
If apisrv has no "<protocol>" https is used. Make sure all apiurl sections have
the new format which is described above. Afterwards apisrv can be removed.
=== Migration case II (scheme only) ===
The current [general] section looks like this:
[general]
...
scheme = <protocol>
This means every apiurl section which doesn't have the new format which is
described above, for instance
[<somehost>]
user = <username>
password = <password>
...
has to be converted to
[<protocol>://<somehost>]
user = <username>
password = <password>
...
Afterwards the scheme option can be removed from the [general] section (it
might be the case that some sections already have the correct format).
=== Migration case III (apisrv and scheme) ===
The current [general] section looks like this:
[general]
...
apisrv = <somehost>
scheme = <protocol>
Both options can be removed if all apiurl sections have the new format which is
described above. So basically just adjust all apiurl sections (it might be the
case that some sections already have the correct format).
KEYRING USAGE
Osc now can store passwords in keyrings instead of ~/.oscrc. To use it,
you need python-keyring and either python-keyring-kde or -gnome.
If you want to switch to using a keyring you need to delete apiurl section
from ~/.oscrc and you will be asked for credentials again, which will be then
stored in the keyring application.
WORKING COPY INCONSISTENT (only affects version >= 0.130)
osc's working copy handling was rewritten in 0.130. Thus some
consistency checks were added. As a result osc might complain
that some old working copies are in an inconsistent state:
Your working copy '.' is in an inconsistent state.
Please run 'osc repairwc .' (Note this might _remove_
files from the .osc/ dir). Please check the state
of the working copy afterwards (via 'osc status .')
To fix this simply run "osc repairwc ." as suggested in the
error message. Note that "osc repairwc ." might need to contact
the api in order to fetch some missing files. Also it might remove
some files from the storedir (.osc/) but it won't touch any locally
modified files.
If it DOES NOT fix the problem please create a bug report and attach
your working copy to the bug (if possible).
USAGE EXAMPLES:
(online at http://en.opensuse.org/openSUSE:OSC )
To list existing content on the server
osc ls # list projects
osc ls Apache # list packages in a project
osc ls Apache subversion # list files of package of a project
Check out content
osc co Apache # entire project
osc co Apache subversion # a package
osc co Apache subversion foo # single file
Update a working copy
osc up
osc up [pac_dir] # update a single package by its path
osc up * # from within a project dir, update all packages
osc up # from within a project dir, update all packages
# AND check out all newly added packages
If an update can't be merged automatically, a file is in 'C' (conflict)
state, and conflicts are marked with special <<<<<<< and >>>>>>> lines.
After manually resolving the problem, use
osc resolved foo
Upload changed content
osc ci # current dir
osc ci <dir>
osc ci file1 file2 ...
Show the status (which files have been changed locally)
osc st
osc st <directory>
osc st file1 file2 ...
Mark files to be added or removed on the next 'checkin'
osc add file1 file2 ...
osc rm file1 file2 ...
Adds all new files in local copy and removes all disappeared files.
osc addremove
Generates a diff, to view the changes
osc diff # current dir
osc diff file1 file2 ...
Shows the build results of the package
osc results
osc results [repository]
Shows the log file of a package (you need to be inside a package directory)
osc log <repository> <arch>
Shows the URLs of .repo files which are packages sources for Yum/YaST/smart
osc repourls [dir]
Triggers a package rebuild for all repositories/architectures of a package
osc rebuildpac [dir]
Shows available repository/build targets
osc repository
Shows the configured repository/build targets of a project
osc repository <project>
Shows meta information
osc meta Apache
osc meta Apache subversion
osc id username
Edit meta information
(Creates new package/project if it doesn't exist)
osc editmeta Apache
osc editmeta Apache subversion
Update package meta data with metadata taken from spec file
osc updatepacmetafromspec <dir>
There are other commands, which you may not need (they may be useful in scripts):
osc repos
osc buildconfig
osc buildinfo
Locally build a package (see 'osc help build' for more info):
osc build <repo> <arch> specfile [--clean|--noinit]
Update a package to a different sources (directory foo_package_source):
cp -a foo_package_source foo; cd foo; osc init <prj> <pac>; osc addremove; osc ci; cd $OLDPWD; rm -r foo
HINT FOR W3M USERS
Putting the following in the file ~/.w3m/passwd will make
w3m know the credentials for the buildservice servers:
"""
host api.opensuse.org
port 80
realm Authentication required
login foo
password bar
host build.opensuse.org
port 80
realm openSUSE Build Service
login foo
password bar
"""
chmod 0600 ~/.w3m/passwd
NOTES about the testsuite
A new test suite has been created and should run via doing
# cd tests
# python suite.py
|
PypiClean
|
/odoo_addon_l10n_es_ticketbai_pos-15.0.1.0.6-py3-none-any.whl/odoo/addons/l10n_es_ticketbai_pos/static/src/js/tbai_models.js
|
odoo.define("l10n_es_ticketbai_pos.tbai_models", function (require) {
    "use strict";

    var core = require("web.core");
    var _t = core._t;
    var field_utils = require("web.field_utils");
    // External libraries exposed as browser globals (not Odoo modules):
    // Backbone (model base), tbai (TicketBAI XML/signature helpers), QRCode.
    var Backbone = window.Backbone;
    var tbai = window.tbai;
    var QRCode = window.QRCode;

    /* A TicketBAI Simplified Invoice represents a customer's order
    to be exported to the Tax Agency.
    */
    var TicketBAISimplifiedInvoice = Backbone.Model.extend({
        // Per-invoice state. Fields stay null until build_invoice() (or the
        // caller via `options`) fills them in.
        initialize: function (attributes, options) {
            Backbone.Model.prototype.initialize.apply(this, arguments);
            var opts = options || {};
            this.pos = opts.pos;
            this.previous_tbai_invoice = null;
            this.order = opts.order || null;
            this.number = opts.number || null;
            this.number_prefix = opts.number_prefix || null;
            this.expedition_date = opts.expedition_date || null;
            this.signature_value = opts.signature_value || null;
            this.tbai_identifier = opts.tbai_identifier || null;
            this.tbai_qr_src = opts.tbai_qr_src || null;
            this.tbai_qr_url = null;
            // Default VAT regime key; may be overridden from the order's
            // fiscal position in build_invoice().
            this.vat_regime_key = "01";
            this.vat_regime_key2 = null;
            this.vat_regime_key3 = null;
            // unsigned_datas: XML before signing; datas: signed XML.
            this.unsigned_datas = null;
            this.datas = null;
        },
        // Tested on Epson TM-20II
        // 164 (default pixels with margin '0') * 35 (required QR image width in mm) / 22 (default width in mm) = 260
        // Pixels. 255 is the maximum.
        qr_options: {
            margin: 0,
            width: 255,
        },
        // Compute the invoice number, resolve VAT regime keys from the
        // order's fiscal position, export to JSON, sign the XML and render
        // the TicketBAI QR code. Returns a jQuery Deferred that resolves
        // once the QR data URL is available, and rejects on any failure.
        build_invoice: function () {
            var self = this;
            var built = new $.Deferred();
            var options = {};
            var deviceId = this.pos.config.tbai_device_serial_number || null;
            // Addon l10n_es_pos -> Order.export_as_JSON()
            var simplified_invoice = null;
            var tbai_json = null;
            this.previous_tbai_invoice = this.pos.get_tbai_last_invoice_data();
            this.expedition_date = new Date();
            // Sequence source depends on config: global POS sequence vs.
            // per-device sequence.
            if (!this.pos.config.pos_sequence_by_device) {
                this.number_prefix = this.pos.config.l10n_es_simplified_invoice_prefix;
                simplified_invoice =
                    this.order.simplified_invoice ||
                    this.number_prefix +
                        this.pos.get_padding_simple_inv(
                            this.pos.config.l10n_es_simplified_invoice_number,
                            this.pos.config.l10n_es_simplified_invoice_padding
                        );
            } else {
                this.number_prefix =
                    this.pos.get_device().device_simplified_invoice_prefix;
                simplified_invoice =
                    this.number_prefix +
                    this.pos.get_padding_simple_inv(
                        this.pos.get_device().device_simplified_invoice_number,
                        this.pos.get_device().device_simplified_invoice_padding
                    );
            }
            // Strip the prefix: this.number keeps only the numeric part.
            this.number = simplified_invoice.slice(this.number_prefix.length);
            if (this.order.fiscal_position) {
                // Each tbai_vat_regime_key* is a many2one-style [id, name]
                // pair; look up its code in pos.tbai_vat_regime_keys.
                var tbai_vat_regime_key =
                    this.order.fiscal_position.tbai_vat_regime_key;
                if (tbai_vat_regime_key) {
                    var id_vat_regime_key =
                        this.order.fiscal_position.tbai_vat_regime_key[0];
                    var object_vat_regime_key = self.pos.tbai_vat_regime_keys.find(
                        (x) => x.id === id_vat_regime_key
                    );
                    this.vat_regime_key = object_vat_regime_key.code;
                }
                var tbai_vat_regime_key2 =
                    this.order.fiscal_position.tbai_vat_regime_key2;
                if (tbai_vat_regime_key2) {
                    var id_vat_regime_key =
                        this.order.fiscal_position.tbai_vat_regime_key2[0];
                    var object_vat_regime_key = self.pos.tbai_vat_regime_keys.find(
                        (x) => x.id === id_vat_regime_key
                    );
                    this.vat_regime_key2 = object_vat_regime_key.code;
                }
                var tbai_vat_regime_key3 =
                    this.order.fiscal_position.tbai_vat_regime_key3;
                if (tbai_vat_regime_key3) {
                    var id_vat_regime_key =
                        this.order.fiscal_position.tbai_vat_regime_key3[0];
                    var object_vat_regime_key = self.pos.tbai_vat_regime_keys.find(
                        (x) => x.id === id_vat_regime_key
                    );
                    this.vat_regime_key3 = object_vat_regime_key.code;
                }
            }
            tbai_json = this.export_as_JSON();
            if (!_.isEmpty(tbai_json) && this.pos.tbai_signer !== null) {
                if (typeof deviceId === "string" || deviceId instanceof String) {
                    options.deviceId = deviceId;
                }
                try {
                    this.unsigned_datas = tbai.toXml(
                        tbai_json.Invoice,
                        tbai_json.PreviousInvoiceId || null,
                        tbai_json.Software,
                        options
                    );
                    // Sign, then derive chain hash / TBAI id / QR URL from
                    // the signed XML, then render the QR image.
                    this.pos.tbai_signer.sign(this.unsigned_datas).then(
                        function (datas) {
                            self.datas = datas;
                            self.signature_value = tbai.getTbaiChainInfo(datas).hash;
                            self.tbai_identifier = tbai.getTbaiId(datas);
                            self.tbai_qr_url = tbai.getTbaiUrlFromBaseURL(
                                datas,
                                self.pos.tbai_qr_base_url
                            );
                            QRCode.toDataURL(self.tbai_qr_url, self.qr_options).then(
                                function (src) {
                                    self.tbai_qr_src = src;
                                    built.resolve();
                                },
                                function (err) {
                                    // NOTE(review): thrown inside an async
                                    // callback, so the outer try/catch does
                                    // NOT catch this and `built` is never
                                    // rejected on this path — confirm intent.
                                    throw new Error(err);
                                }
                            );
                        },
                        function (err) {
                            // NOTE(review): same as above — escapes the
                            // try/catch because it runs asynchronously.
                            throw new Error(err);
                        }
                    );
                } catch (e) {
                    console.error(e);
                    // NOTE(review): showPopup is not defined on this Backbone
                    // model — presumably expected from the POS Gui/component;
                    // verify this call actually works in the catch path.
                    this.showPopup("ErrorPopup", {
                        title: _t("TicketBAI"),
                        body: e.message,
                    });
                    built.reject();
                }
            } else {
                built.reject();
            }
            return built;
        },
        // Return `vat` upper-cased, with the leading country code removed
        // when it is present (e.g. "ES12345678Z" -> "12345678Z").
        get_vat_without_country_code: function (vat, country_code) {
            var vat_without_country_code = null;
            var vat_upper = vat.toUpperCase();
            var country_code_upper = country_code ? country_code.toUpperCase() : null;
            if (
                country_code_upper &&
                vat_upper.slice(0, country_code_upper.length) === country_code_upper
            ) {
                vat_without_country_code = vat_upper.slice(country_code_upper.length);
            } else {
                vat_without_country_code = vat_upper;
            }
            return vat_without_country_code;
        },
        // Company VAT without its country-code prefix.
        get_tbai_company_vat: function () {
            var company = this.pos.company;
            return this.get_vat_without_country_code(company.vat, company.country.code);
        },
        // Partner identifier for TicketBAI: the VAT (without country code)
        // for Spanish partners or idtype "02" (NIF); otherwise the partner's
        // foreign identification number.
        get_tbai_partner_vat: function (partner_id) {
            var partner = this.pos.db.get_partner_by_id(partner_id);
            var country_code = this.pos.get_country_code_by_id(partner.country_id[0]);
            if (country_code === "ES" || partner.tbai_partner_idtype === "02") {
                return this.get_vat_without_country_code(partner.vat, country_code);
            }
            return partner.tbai_partner_identification_number;
        },
        // Serialize this invoice into the JSON structure expected by
        // tbai.toXml(): { Invoice, PreviousInvoiceId?, Software }. Returns
        // an empty object when number/date/order are not yet populated.
        export_as_JSON: function () {
            var order_json =
                (this.order !== null && this.order.export_as_JSON()) || null;
            var tbai_json = {};
            var company = this.pos.company;
            var vat_keys = [this.vat_regime_key];
            var self = this;
            var simplified = "N";
            if (
                order_json !== null &&
                this.number !== null &&
                this.expedition_date !== null
            ) {
                if (this.vat_regime_key2 !== null) {
                    vat_keys.push(this.vat_regime_key2);
                }
                if (this.vat_regime_key3 !== null) {
                    vat_keys.push(this.vat_regime_key3);
                }
                if (company.tbai_vat_regime_simplified) {
                    simplified = "S";
                }
                tbai_json.Invoice = {
                    simple: true,
                    issuer: {
                        irsId: this.get_tbai_company_vat(),
                        name: company.name,
                    },
                    id: {
                        number: this.number,
                        serie: this.number_prefix,
                        issuedTime: this.expedition_date,
                    },
                    description: {
                        text: order_json.name,
                        operationDate: this.expedition_date,
                    },
                    lines: this.get_tbai_lines_from_json(order_json.lines),
                    // Amounts are round-tripped through the POS currency
                    // formatter to apply the configured rounding.
                    total: field_utils.parse.float(
                        self.pos.format_currency_no_symbol(order_json.amount_total)
                    ),
                    vatKeys: vat_keys,
                    simplified: simplified,
                };
                tbai_json.Invoice.vatLines =
                    this.get_tbai_vat_lines_from_json(order_json);
                if (order_json.partner_id) {
                    var partner = this.pos.db.get_partner_by_id(order_json.partner_id);
                    var zip = partner.zip;
                    var address =
                        (partner.street || "") +
                        ", " +
                        (partner.zip || "") +
                        " " +
                        (partner.city || "") +
                        ", " +
                        (partner.country_id[1] || "");
                    tbai_json.Invoice.recipient = {
                        irsId: this.get_tbai_partner_vat(order_json.partner_id),
                        name: partner.name,
                        postal: zip,
                        address: address,
                    };
                }
                // Chain this invoice to the previous one (number, serie,
                // date and a 100-char hash prefix of its signature).
                if (this.previous_tbai_invoice !== null) {
                    tbai_json.PreviousInvoiceId = {
                        number: this.previous_tbai_invoice.number,
                        serie: this.previous_tbai_invoice.number_prefix,
                        // JSON round-trip converts a stored Date (or date
                        // string) into an ISO string before re-parsing.
                        issuedTime: new Date(
                            JSON.parse(
                                JSON.stringify(
                                    this.previous_tbai_invoice.expedition_date
                                )
                            )
                        ),
                        hash: this.previous_tbai_invoice.signature_value.substring(
                            0,
                            100
                        ),
                    };
                }
                tbai_json.Software = {
                    license: company.tbai_license_key,
                    developerIrsId: this.get_tbai_partner_vat(
                        company.tbai_developer_id[0]
                    ),
                    name: company.tbai_software_name,
                    version: company.tbai_software_version,
                };
            }
            return tbai_json;
        },
        // Map exported order lines (item[2] holds the line values) to the
        // TicketBAI line structure. Descriptions are capped at 250 chars and
        // optionally replaced by the company's protected-data text.
        get_tbai_lines_from_json: function (lines_json) {
            var lines = [];
            var line = null;
            var company = this.pos.company;
            var description_line = null;
            var self = this;
            lines_json.forEach(function (item) {
                line = item[2];
                description_line = line.tbai_description.substring(0, 250);
                if (company.tbai_protected_data && company.tbai_protected_data_txt) {
                    description_line = company.tbai_protected_data_txt.substring(
                        0,
                        250
                    );
                }
                lines.push({
                    description: description_line,
                    quantity: line.qty,
                    price: field_utils.parse.float(
                        self.pos.format_currency_no_symbol(line.tbai_price_unit)
                    ),
                    discount: field_utils.parse.float(
                        self.pos.format_currency_no_symbol(line.discount)
                    ),
                    discountAmount: field_utils.parse.float(
                        self.pos.format_currency_no_symbol(
                            (line.qty * line.tbai_price_unit * line.discount) / 100.0
                        )
                    ),
                    vat: line.tbai_vat_amount,
                    amount: field_utils.parse.float(
                        self.pos.format_currency_no_symbol(line.tbai_price_without_tax)
                    ),
                    amountWithVat: field_utils.parse.float(
                        self.pos.format_currency_no_symbol(line.tbai_price_with_tax)
                    ),
                });
            });
            return lines;
        },
        // Build the vatLines section from the order's taxLines. When the
        // order has no tax lines, fall back to a single line that uses the
        // first order line's VAT rate with a zero tax amount.
        get_tbai_vat_lines_from_json: function (order_json) {
            var vatLines = [];
            var vatLinesJson = order_json.taxLines;
            var self = this;
            if (vatLinesJson && vatLinesJson.length > 0) {
                vatLinesJson.forEach(function (vatLineJson) {
                    var vatLine = vatLineJson[2];
                    vatLines.push({
                        base: field_utils.parse.float(
                            self.pos.format_currency_no_symbol(vatLine.baseAmount)
                        ),
                        rate: vatLine.tax.amount,
                        amount: field_utils.parse.float(
                            self.pos.format_currency_no_symbol(vatLine.amount)
                        ),
                    });
                });
            } else {
                var fline = order_json.lines[0][2];
                vatLines.push({
                    base: field_utils.parse.float(
                        self.pos.format_currency_no_symbol(order_json.amount_total)
                    ),
                    rate: fline.tbai_vat_amount,
                    amount: 0,
                });
            }
            return vatLines;
        },
    });
    return {
        TicketBAISimplifiedInvoice: TicketBAISimplifiedInvoice,
    };
});
|
PypiClean
|
/fds.sdk.NaturalLanguageProcessing-0.22.6-py3-none-any.whl/fds/sdk/NaturalLanguageProcessing/model/http_error.py
|
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.NaturalLanguageProcessing.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from fds.sdk.NaturalLanguageProcessing.exceptions import ApiAttributeError
class HTTPError(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-constrained attributes on this model.
    allowed_values = {
    }

    # No per-attribute validations (max_length, regex, ...) on this model.
    validations = {
    }

    # NOTE: the generator defines these @cached_property methods without
    # `self`; the cached_property implementation in model_utils calls them
    # unbound, so do not add a parameter.
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # The model value itself may not be null in the API payload.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'detail': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'message': (str,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No polymorphic discriminator for this schema.
        return None

    # Python attribute name -> JSON key (identical for this model).
    attribute_map = {
        'detail': 'detail',  # noqa: E501
        'message': 'message',  # noqa: E501
    }

    # No server-populated read-only attributes.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """HTTPError - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            detail ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
            message (str): [optional] # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Construct without running __init__ (which rejects read-only
        # attributes), so server data can populate every field.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes that must always exist on instances.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """HTTPError - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            detail ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
            message (str): [optional] # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): setattr runs before this check, so a read-only
            # attribute is assigned before ApiAttributeError is raised —
            # this mirrors the upstream generator's behavior.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
|
PypiClean
|
/ydk-models-cisco-ios-xe-16.9.3.post1.tar.gz/ydk-models-cisco-ios-xe-16.9.3.post1/ydk/models/cisco_ios_xe/CISCO_FIREWALL_TC.py
|
import sys
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class CFWApplicationProtocol(Enum):
    """
    CFWApplicationProtocol (Enum Class)

    This type denotes the application (OSI Layer 7)
    protocol/service corresponding to a firewall session
    or a connection.

    Description of constants of this type

    'none'
    Denotes the semantics of 'not applicable'.

    'other'
    Denotes any protocol not listed.

    .. data:: none = 1

    .. data:: other = 2

    .. data:: ftp = 3

    .. data:: telnet = 4

    .. data:: smtp = 5

    .. data:: http = 6

    .. data:: tacacs = 7

    .. data:: dns = 8

    .. data:: sqlnet = 9

    .. data:: https = 10

    .. data:: tftp = 11

    .. data:: gopher = 12

    .. data:: finger = 13

    .. data:: kerberos = 14

    .. data:: pop2 = 15

    .. data:: pop3 = 16

    .. data:: sunRpc = 17

    .. data:: msRpc = 18

    .. data:: nntp = 19

    .. data:: snmp = 20

    .. data:: imap = 21

    .. data:: ldap = 22

    .. data:: exec_ = 23

    .. data:: login = 24

    .. data:: shell = 25

    .. data:: msSql = 26

    .. data:: sybaseSql = 27

    .. data:: nfs = 28

    .. data:: lotusnote = 29

    .. data:: h323 = 30

    .. data:: cuseeme = 31

    .. data:: realmedia = 32

    .. data:: netshow = 33

    .. data:: streamworks = 34

    .. data:: vdolive = 35

    .. data:: sap = 36

    .. data:: sip = 37

    .. data:: mgcp = 38

    .. data:: rtsp = 39

    .. data:: skinny = 40

    .. data:: gtpV0 = 41

    .. data:: gtpV1 = 42

    .. data:: echo = 43

    .. data:: discard = 44

    .. data:: daytime = 45

    .. data:: netstat = 46

    .. data:: ssh = 47

    .. data:: time = 48

    .. data:: tacacsDs = 49

    .. data:: bootps = 50

    .. data:: bootpc = 51

    .. data:: dnsix = 52

    .. data:: rtelnet = 53

    .. data:: ident = 54

    .. data:: sqlServ = 55

    .. data:: ntp = 56

    .. data:: pwdgen = 57

    .. data:: ciscoFna = 58

    .. data:: ciscoTna = 59

    .. data:: ciscoSys = 60

    .. data:: netbiosNs = 61

    .. data:: netbiosDgm = 62

    .. data:: netbiosSsn = 63

    .. data:: sqlSrv = 64

    .. data:: snmpTrap = 65

    .. data:: rsvd = 66

    .. data:: send = 67

    .. data:: xdmcp = 68

    .. data:: bgp = 69

    .. data:: irc = 70

    .. data:: qmtp = 71

    .. data:: ipx = 72

    .. data:: dbase = 73

    .. data:: imap3 = 74

    .. data:: rsvpTunnel = 75

    .. data:: hpCollector = 76

    .. data:: hpManagedNode = 77

    .. data:: hpAlarmMgr = 78

    .. data:: microsoftDs = 79

    .. data:: creativeServer = 80

    .. data:: creativePartnr = 81

    .. data:: appleQtc = 82

    .. data:: igmpV3Lite = 83

    .. data:: isakmp = 84

    .. data:: biff = 85

    .. data:: who = 86

    .. data:: syslog = 87

    .. data:: router = 88

    .. data:: ncp = 89

    .. data:: timed = 90

    .. data:: ircServ = 91

    .. data:: uucp = 92

    .. data:: syslogConn = 93

    .. data:: sshell = 94

    .. data:: ldaps = 95

    .. data:: dhcpFailover = 96

    .. data:: msexchRouting = 97

    .. data:: entrustSvcs = 98

    .. data:: entrustSvcHandler = 99

    .. data:: ciscoTdp = 100

    .. data:: webster = 101

    .. data:: gdoi = 102

    .. data:: iscsi = 103

    .. data:: cddbp = 104

    .. data:: ftps = 105

    .. data:: telnets = 106

    .. data:: imaps = 107

    .. data:: ircs = 108

    .. data:: pop3s = 109

    .. data:: socks = 110

    .. data:: kazaa = 111

    .. data:: msSqlM = 112

    .. data:: msSna = 113

    .. data:: wins = 114

    .. data:: ica = 115

    .. data:: orasrv = 116

    .. data:: rdbDbsDisp = 117

    .. data:: vqp = 118

    .. data:: icabrowser = 119

    .. data:: kermit = 120

    .. data:: rsvpEncap = 121

    .. data:: l2tp = 122

    .. data:: pptp = 123

    .. data:: h323Gatestat = 124

    .. data:: rWinsock = 125

    .. data:: radius = 126

    .. data:: hsrp = 127

    .. data:: net8Cman = 128

    .. data:: oracleEmVp = 129

    .. data:: oracleNames = 130

    .. data:: oracle = 131

    .. data:: ciscoSvcs = 132

    .. data:: ciscoNetMgmt = 133

    .. data:: stun = 134

    .. data:: trRsrb = 135

    .. data:: ddnsV3 = 136

    .. data:: aceSvr = 137

    .. data:: giop = 138

    .. data:: ttc = 139

    .. data:: ipass = 140

    .. data:: clp = 141

    .. data:: citrixImaClient = 142

    .. data:: sms = 143

    .. data:: citrix = 144

    .. data:: realSecure = 145

    .. data:: lotusMtap = 146

    .. data:: cifs = 147

    .. data:: msDotnetster = 148

    .. data:: tarantella = 149

    .. data:: fcipPort = 150

    .. data:: ssp = 151

    .. data:: iscsiTarget = 152

    .. data:: mySql = 153

    .. data:: msClusterNet = 154

    .. data:: ldapAdmin = 155

    .. data:: ieee80211Iapp = 156

    .. data:: oemAgent = 157

    .. data:: rtcPmPort = 158

    .. data:: dbControlAgent = 159

    .. data:: ipsecMsft = 160

    .. data:: sipTls = 161

    .. data:: aim = 162

    .. data:: pcAnyWhereData = 163

    .. data:: pcAnyWhereStat = 164

    .. data:: x11 = 165

    .. data:: ircu = 166

    .. data:: n2h2Server = 167

    .. data:: h323CallSigAlt = 168

    .. data:: yahooMsgr = 169

    .. data:: msnMsgr = 170

    """

    # One YLeaf member per protocol; the numeric values mirror the
    # ``.. data::`` entries in the docstring above. This module is
    # auto-generated from the CISCO-FIREWALL-TC YANG/MIB model — do not
    # renumber or rename members by hand.
    # ``exec_`` carries a trailing underscore because ``exec`` is a
    # reserved word; its YANG name is still "exec".
    none = Enum.YLeaf(1, "none")

    other = Enum.YLeaf(2, "other")

    ftp = Enum.YLeaf(3, "ftp")

    telnet = Enum.YLeaf(4, "telnet")

    smtp = Enum.YLeaf(5, "smtp")

    http = Enum.YLeaf(6, "http")

    tacacs = Enum.YLeaf(7, "tacacs")

    dns = Enum.YLeaf(8, "dns")

    sqlnet = Enum.YLeaf(9, "sqlnet")

    https = Enum.YLeaf(10, "https")

    tftp = Enum.YLeaf(11, "tftp")

    gopher = Enum.YLeaf(12, "gopher")

    finger = Enum.YLeaf(13, "finger")

    kerberos = Enum.YLeaf(14, "kerberos")

    pop2 = Enum.YLeaf(15, "pop2")

    pop3 = Enum.YLeaf(16, "pop3")

    sunRpc = Enum.YLeaf(17, "sunRpc")

    msRpc = Enum.YLeaf(18, "msRpc")

    nntp = Enum.YLeaf(19, "nntp")

    snmp = Enum.YLeaf(20, "snmp")

    imap = Enum.YLeaf(21, "imap")

    ldap = Enum.YLeaf(22, "ldap")

    exec_ = Enum.YLeaf(23, "exec")

    login = Enum.YLeaf(24, "login")

    shell = Enum.YLeaf(25, "shell")

    msSql = Enum.YLeaf(26, "msSql")

    sybaseSql = Enum.YLeaf(27, "sybaseSql")

    nfs = Enum.YLeaf(28, "nfs")

    lotusnote = Enum.YLeaf(29, "lotusnote")

    h323 = Enum.YLeaf(30, "h323")

    cuseeme = Enum.YLeaf(31, "cuseeme")

    realmedia = Enum.YLeaf(32, "realmedia")

    netshow = Enum.YLeaf(33, "netshow")

    streamworks = Enum.YLeaf(34, "streamworks")

    vdolive = Enum.YLeaf(35, "vdolive")

    sap = Enum.YLeaf(36, "sap")

    sip = Enum.YLeaf(37, "sip")

    mgcp = Enum.YLeaf(38, "mgcp")

    rtsp = Enum.YLeaf(39, "rtsp")

    skinny = Enum.YLeaf(40, "skinny")

    gtpV0 = Enum.YLeaf(41, "gtpV0")

    gtpV1 = Enum.YLeaf(42, "gtpV1")

    echo = Enum.YLeaf(43, "echo")

    discard = Enum.YLeaf(44, "discard")

    daytime = Enum.YLeaf(45, "daytime")

    netstat = Enum.YLeaf(46, "netstat")

    ssh = Enum.YLeaf(47, "ssh")

    time = Enum.YLeaf(48, "time")

    tacacsDs = Enum.YLeaf(49, "tacacsDs")

    bootps = Enum.YLeaf(50, "bootps")

    bootpc = Enum.YLeaf(51, "bootpc")

    dnsix = Enum.YLeaf(52, "dnsix")

    rtelnet = Enum.YLeaf(53, "rtelnet")

    ident = Enum.YLeaf(54, "ident")

    sqlServ = Enum.YLeaf(55, "sqlServ")

    ntp = Enum.YLeaf(56, "ntp")

    pwdgen = Enum.YLeaf(57, "pwdgen")

    ciscoFna = Enum.YLeaf(58, "ciscoFna")

    ciscoTna = Enum.YLeaf(59, "ciscoTna")

    ciscoSys = Enum.YLeaf(60, "ciscoSys")

    netbiosNs = Enum.YLeaf(61, "netbiosNs")

    netbiosDgm = Enum.YLeaf(62, "netbiosDgm")

    netbiosSsn = Enum.YLeaf(63, "netbiosSsn")

    sqlSrv = Enum.YLeaf(64, "sqlSrv")

    snmpTrap = Enum.YLeaf(65, "snmpTrap")

    rsvd = Enum.YLeaf(66, "rsvd")

    send = Enum.YLeaf(67, "send")

    xdmcp = Enum.YLeaf(68, "xdmcp")

    bgp = Enum.YLeaf(69, "bgp")

    irc = Enum.YLeaf(70, "irc")

    qmtp = Enum.YLeaf(71, "qmtp")

    ipx = Enum.YLeaf(72, "ipx")

    dbase = Enum.YLeaf(73, "dbase")

    imap3 = Enum.YLeaf(74, "imap3")

    rsvpTunnel = Enum.YLeaf(75, "rsvpTunnel")

    hpCollector = Enum.YLeaf(76, "hpCollector")

    hpManagedNode = Enum.YLeaf(77, "hpManagedNode")

    hpAlarmMgr = Enum.YLeaf(78, "hpAlarmMgr")

    microsoftDs = Enum.YLeaf(79, "microsoftDs")

    creativeServer = Enum.YLeaf(80, "creativeServer")

    creativePartnr = Enum.YLeaf(81, "creativePartnr")

    appleQtc = Enum.YLeaf(82, "appleQtc")

    igmpV3Lite = Enum.YLeaf(83, "igmpV3Lite")

    isakmp = Enum.YLeaf(84, "isakmp")

    biff = Enum.YLeaf(85, "biff")

    who = Enum.YLeaf(86, "who")

    syslog = Enum.YLeaf(87, "syslog")

    router = Enum.YLeaf(88, "router")

    ncp = Enum.YLeaf(89, "ncp")

    timed = Enum.YLeaf(90, "timed")

    ircServ = Enum.YLeaf(91, "ircServ")

    uucp = Enum.YLeaf(92, "uucp")

    syslogConn = Enum.YLeaf(93, "syslogConn")

    sshell = Enum.YLeaf(94, "sshell")

    ldaps = Enum.YLeaf(95, "ldaps")

    dhcpFailover = Enum.YLeaf(96, "dhcpFailover")

    msexchRouting = Enum.YLeaf(97, "msexchRouting")

    entrustSvcs = Enum.YLeaf(98, "entrustSvcs")

    entrustSvcHandler = Enum.YLeaf(99, "entrustSvcHandler")

    ciscoTdp = Enum.YLeaf(100, "ciscoTdp")

    webster = Enum.YLeaf(101, "webster")

    gdoi = Enum.YLeaf(102, "gdoi")

    iscsi = Enum.YLeaf(103, "iscsi")

    cddbp = Enum.YLeaf(104, "cddbp")

    ftps = Enum.YLeaf(105, "ftps")

    telnets = Enum.YLeaf(106, "telnets")

    imaps = Enum.YLeaf(107, "imaps")

    ircs = Enum.YLeaf(108, "ircs")

    pop3s = Enum.YLeaf(109, "pop3s")

    socks = Enum.YLeaf(110, "socks")

    kazaa = Enum.YLeaf(111, "kazaa")

    msSqlM = Enum.YLeaf(112, "msSqlM")

    msSna = Enum.YLeaf(113, "msSna")

    wins = Enum.YLeaf(114, "wins")

    ica = Enum.YLeaf(115, "ica")

    orasrv = Enum.YLeaf(116, "orasrv")

    rdbDbsDisp = Enum.YLeaf(117, "rdbDbsDisp")

    vqp = Enum.YLeaf(118, "vqp")

    icabrowser = Enum.YLeaf(119, "icabrowser")

    kermit = Enum.YLeaf(120, "kermit")

    rsvpEncap = Enum.YLeaf(121, "rsvpEncap")

    l2tp = Enum.YLeaf(122, "l2tp")

    pptp = Enum.YLeaf(123, "pptp")

    h323Gatestat = Enum.YLeaf(124, "h323Gatestat")

    rWinsock = Enum.YLeaf(125, "rWinsock")

    radius = Enum.YLeaf(126, "radius")

    hsrp = Enum.YLeaf(127, "hsrp")

    net8Cman = Enum.YLeaf(128, "net8Cman")

    oracleEmVp = Enum.YLeaf(129, "oracleEmVp")

    oracleNames = Enum.YLeaf(130, "oracleNames")

    oracle = Enum.YLeaf(131, "oracle")

    ciscoSvcs = Enum.YLeaf(132, "ciscoSvcs")

    ciscoNetMgmt = Enum.YLeaf(133, "ciscoNetMgmt")

    stun = Enum.YLeaf(134, "stun")

    trRsrb = Enum.YLeaf(135, "trRsrb")

    ddnsV3 = Enum.YLeaf(136, "ddnsV3")

    aceSvr = Enum.YLeaf(137, "aceSvr")

    giop = Enum.YLeaf(138, "giop")

    ttc = Enum.YLeaf(139, "ttc")

    ipass = Enum.YLeaf(140, "ipass")

    clp = Enum.YLeaf(141, "clp")

    citrixImaClient = Enum.YLeaf(142, "citrixImaClient")

    sms = Enum.YLeaf(143, "sms")

    citrix = Enum.YLeaf(144, "citrix")

    realSecure = Enum.YLeaf(145, "realSecure")

    lotusMtap = Enum.YLeaf(146, "lotusMtap")

    cifs = Enum.YLeaf(147, "cifs")

    msDotnetster = Enum.YLeaf(148, "msDotnetster")

    tarantella = Enum.YLeaf(149, "tarantella")

    fcipPort = Enum.YLeaf(150, "fcipPort")

    ssp = Enum.YLeaf(151, "ssp")

    iscsiTarget = Enum.YLeaf(152, "iscsiTarget")

    mySql = Enum.YLeaf(153, "mySql")

    msClusterNet = Enum.YLeaf(154, "msClusterNet")

    ldapAdmin = Enum.YLeaf(155, "ldapAdmin")

    ieee80211Iapp = Enum.YLeaf(156, "ieee80211Iapp")

    oemAgent = Enum.YLeaf(157, "oemAgent")

    rtcPmPort = Enum.YLeaf(158, "rtcPmPort")

    dbControlAgent = Enum.YLeaf(159, "dbControlAgent")

    ipsecMsft = Enum.YLeaf(160, "ipsecMsft")

    sipTls = Enum.YLeaf(161, "sipTls")

    aim = Enum.YLeaf(162, "aim")

    pcAnyWhereData = Enum.YLeaf(163, "pcAnyWhereData")

    pcAnyWhereStat = Enum.YLeaf(164, "pcAnyWhereStat")

    x11 = Enum.YLeaf(165, "x11")

    ircu = Enum.YLeaf(166, "ircu")

    n2h2Server = Enum.YLeaf(167, "n2h2Server")

    h323CallSigAlt = Enum.YLeaf(168, "h323CallSigAlt")

    yahooMsgr = Enum.YLeaf(169, "yahooMsgr")

    msnMsgr = Enum.YLeaf(170, "msnMsgr")
class CFWNetworkProtocol(Enum):
    """
    CFWNetworkProtocol (Enum Class)

    Protocols operating at layers 3 or 4 of the Open System
    Interconnection (OSI) model.

    .. data:: none = 1

        Semantics of 'not applicable'.

    .. data:: other = 2

        Any protocol not listed here.

    .. data:: ip = 3

        Internet Protocol (IP).

    .. data:: icmp = 4

        Internet Control Message Protocol.

    .. data:: gre = 5

        Generic Route Encapsulation protocol.

    .. data:: udp = 6

        User Datagram Protocol.

    .. data:: tcp = 7

        Transmission Control Protocol.
    """

    none = Enum.YLeaf(1, "none")

    other = Enum.YLeaf(2, "other")

    ip = Enum.YLeaf(3, "ip")

    icmp = Enum.YLeaf(4, "icmp")

    gre = Enum.YLeaf(5, "gre")

    udp = Enum.YLeaf(6, "udp")

    tcp = Enum.YLeaf(7, "tcp")
class CFWPolicyTargetType(Enum):
    """
    CFWPolicyTargetType (Enum Class)

    The type of a policy target.

    .. data:: all = 1

        The set of all applicable targets. Some firewall implementations
        allow policies to be applied on all applicable targets (such
        policies are termed 'global').

    .. data:: other = 2

        An entity type that has not yet been classified in one of the
        other types; accommodates new target types before this textual
        convention is revised to include them.

    .. data:: interface = 3

        An interface of the managed device.

    .. data:: zone = 4

        A zone: a collection of interfaces of the managed device.

    .. data:: zonepair = 5

        A pair of zones.

    .. data:: user = 6

        The identity of a user who is authorized to access the firewall
        itself or the resources protected by the firewall.

    .. data:: usergroup = 7

        The identity of a user group: a collection of user identities,
        as defined above.

    .. data:: context = 8

        A logical device defined in the managed device with a distinct
        management context. Examples include virtual contexts defined by
        the Firewall Service Module, virtual sensors defined by the
        Intrusion Detection Service Module, and Virtual Routing and
        Forwarding (VRFs) defined by IOS.
    """

    all = Enum.YLeaf(1, "all")

    other = Enum.YLeaf(2, "other")

    interface = Enum.YLeaf(3, "interface")

    zone = Enum.YLeaf(4, "zone")

    zonepair = Enum.YLeaf(5, "zonepair")

    user = Enum.YLeaf(6, "user")

    usergroup = Enum.YLeaf(7, "usergroup")

    context = Enum.YLeaf(8, "context")
class CFWUrlServerStatus(Enum):
    """
    CFWUrlServerStatus (Enum Class)

    Status of the URL filtering server which the firewall uses to
    implement URL filtering.

    .. data:: online = 1

        The server is online.

    .. data:: offline = 2

        The server is offline.

    .. data:: indeterminate = 3

        The server status cannot be determined.
    """

    online = Enum.YLeaf(1, "online")

    offline = Enum.YLeaf(2, "offline")

    indeterminate = Enum.YLeaf(3, "indeterminate")
class CFWUrlfVendorId(Enum):
    """
    CFWUrlfVendorId (Enum Class)

    Vendor of a URL filtering server which the firewall uses to
    implement URL filtering.

    A URL filtering server provides a database of URLs with appropriate
    access restrictions (e.g., deny or permit). Various security devices
    can make use of these filtering servers to provide URL filtering
    functionality to the users.

    .. data:: other = 1

        Other type of URL filtering server than those specified below.

    .. data:: websense = 2

        Websense URL filtering server (see http://www.websense.com).

    .. data:: n2h2 = 3

        N2H2 URL filtering server (see http://www.n2h2.com).
    """

    other = Enum.YLeaf(1, "other")

    websense = Enum.YLeaf(2, "websense")

    n2h2 = Enum.YLeaf(3, "n2h2")
|
PypiClean
|
/jupyterhub_url_sharing-0.1.0.tar.gz/jupyterhub_url_sharing-0.1.0/node_modules/core-js-pure/modules/esnext.async-iterator.flat-map.js
|
'use strict';
// core-js implementation of the `AsyncIterator.prototype.flatMap` proposal.
var $ = require('../internals/export');
var call = require('../internals/function-call');
var aCallable = require('../internals/a-callable');
var anObject = require('../internals/an-object');
var isObject = require('../internals/is-object');
var getIteratorDirect = require('../internals/get-iterator-direct');
var createAsyncIteratorProxy = require('../internals/async-iterator-create-proxy');
var createIterResultObject = require('../internals/create-iter-result-object');
var getAsyncIteratorFlattenable = require('../internals/get-async-iterator-flattenable');
var closeAsyncIteration = require('../internals/async-iterator-close');

var AsyncIteratorProxy = createAsyncIteratorProxy(function (Promise) {
  var state = this;
  var iterator = state.iterator;
  var mapper = state.mapper;
  return new Promise(function (resolve, reject) {
    // Mark the proxy iterator as finished before propagating the error.
    var doneAndReject = function (error) {
      state.done = true;
      reject(error);
    };

    // Abrupt completion: close the underlying iterator, then reject.
    var ifAbruptCloseAsyncIterator = function (error) {
      closeAsyncIteration(iterator, doneAndReject, error, doneAndReject);
    };

    // Pull the next value from the source (outer) iterator and map it
    // into an inner async iterable, then resume the inner loop.
    var outerLoop = function () {
      try {
        Promise.resolve(anObject(call(state.next, iterator))).then(function (step) {
          try {
            if (anObject(step).done) {
              state.done = true;
              resolve(createIterResultObject(undefined, true));
            } else {
              var value = step.value;
              try {
                var result = mapper(value, state.counter++);
                var handler = function (mapped) {
                  try {
                    state.inner = getAsyncIteratorFlattenable(mapped);
                    innerLoop();
                  } catch (error4) { ifAbruptCloseAsyncIterator(error4); }
                };
                // The mapper may return a thenable; await it before flattening.
                if (isObject(result)) Promise.resolve(result).then(handler, ifAbruptCloseAsyncIterator);
                else handler(result);
              } catch (error3) { ifAbruptCloseAsyncIterator(error3); }
            }
          } catch (error2) { doneAndReject(error2); }
        }, doneAndReject);
      } catch (error) { doneAndReject(error); }
    };

    // Drain the current inner iterator; fall back to the outer loop once
    // the inner iterator is exhausted (or none is active).
    var innerLoop = function () {
      var inner = state.inner;
      if (inner) {
        try {
          Promise.resolve(anObject(call(inner.next, inner.iterator))).then(function (result) {
            try {
              if (anObject(result).done) {
                state.inner = null;
                outerLoop();
              } else resolve(createIterResultObject(result.value, false));
            } catch (error1) { ifAbruptCloseAsyncIterator(error1); }
          }, ifAbruptCloseAsyncIterator);
        } catch (error) { ifAbruptCloseAsyncIterator(error); }
      } else outerLoop();
    };

    innerLoop();
  });
});

// `AsyncIterator.prototype.flatMap` method
// https://github.com/tc39/proposal-async-iterator-helpers
$({ target: 'AsyncIterator', proto: true, real: true }, {
  flatMap: function flatMap(mapper) {
    anObject(this);
    aCallable(mapper);
    return new AsyncIteratorProxy(getIteratorDirect(this), {
      mapper: mapper,
      inner: null
    });
  }
});
|
PypiClean
|
/thunes-0.31-=%20py37-none-any.whl/transaction/models.py
|
from django.db import models
from sender.models import Sender
from beneficiary.models import Beneficiary
from quotation.models import Quotation
# Create your models here.
class Transaction(models.Model):
    """A money-transfer transaction linking a quotation, a sender and a beneficiary."""

    # Vendor status codes. The first digit of the code matches the coarse
    # status class in TRANSACTION_STATUS_CLASS below; the remaining digits
    # encode the detailed reason.
    TRANSACTION_STATUS = (
        ('10000', 'CREATED'),
        ('20000', 'CONFIRMED'),
        ('20110', 'CONFIRMED-UNDER-REVIEW-SLS'),
        ('20150', 'CONFIRMED-WAITING-FOR-PICKUP'),
        ('30000', 'REJECTED'),
        ('30110', 'REJECTED-SLS-SENDER'),
        ('30120', 'REJECTED-SLS-BENEFICIARY'),
        ('30200', 'REJECTED-INVALID-BENEFICIARY'),
        ('30201', 'REJECTED-BARRED-BENEFICIARY'),
        ('30210', 'REJECTED-INVALID-BENEFICIARY-DETAILS'),
        ('30305', 'REJECTED-LIMITATIONS-ON-TRANSACTION-VALUE'),
        ('30310', 'REJECTED-LIMITATIONS-ON-SENDER-VALUE'),
        ('30320', 'REJECTED-LIMITATIONS-ON-BENEFICIARY-VALUE'),
        ('30330', 'REJECTED-LIMITATIONS-ON-ACCOUNT-VALUE'),
        ('30350', 'REJECTED-LIMITATIONS-ON-SENDER-QUANTITY'),
        ('30360', 'REJECTED-LIMITATIONS-ON-BENEFICIARY-QUANTITY'),
        ('30370', 'REJECTED-LIMITATIONS-ON-ACCOUNT-QUANTITY'),
        ('30400', 'REJECTED-PAYER-CURRENTLY-UNAVAILABLE'),
        ('30500', 'REJECTED-INSUFFICIENT-BALANCE'),
        ('40000', 'CANCELLED'),
        ('50000', 'SUBMITTED'),
        ('60000', 'AVAILABLE'),
        ('70000', 'COMPLETED'),
        ('80000', 'REVERSED'),
        ('90000', 'DECLINED'),
        ('90110', 'DECLINED-SLS-SENDER'),
        ('90120', 'DECLINED-SLS-BENEFICIARY'),
        ('90200', 'DECLINED-INVALID-BENEFICIARY'),
        ('90201', 'DECLINED-BARRED-BENEFICIARY'),
        ('90202', 'DECLINED-UNSUPPORTED-BENEFICIARY'),
        ('90210', 'DECLINED-INVALID-BENEFICIARY-DETAILS'),
        ('90305', 'DECLINED-LIMITATIONS-ON-TRANSACTION-VALUE'),
        ('90310', 'DECLINED-LIMITATIONS-ON-SENDER-VALUE'),
        ('90320', 'DECLINED-LIMITATIONS-ON-BENEFICIARY-VALUE'),
        ('90330', 'DECLINED-LIMITATIONS-ON-ACCOUNT-VALUE'),
        ('90331', 'DECLINED-LIMITATIONS-ON-ACCOUNT-VALUE-DAILY'),
        ('90332', 'DECLINED-LIMITATIONS-ON-ACCOUNT-VALUE-WEEKLY'),
        ('90333', 'DECLINED-LIMITATIONS-ON-ACCOUNT-VALUE-MONTHLY'),
        ('90334', 'DECLINED-LIMITATIONS-ON-ACCOUNT-VALUE-YEARLY'),
        ('90350', 'DECLINED-LIMITATIONS-ON-SENDER-QUANTITY'),
        ('90360', 'DECLINED-LIMITATIONS-ON-BENEFICIARY-QUANTITY'),
        ('90370', 'DECLINED-LIMITATIONS-ON-ACCOUNT-QUANTITY'),
        ('90400', 'DECLINED-PAYER-CURRENTLY-UNAVAILABLE')
    )
    # Coarse status grouping; keyed by the first digit of the status code.
    TRANSACTION_STATUS_CLASS = (
        ('1', 'CREATED'),
        ('2', 'CONFIRMED'),
        ('3', 'REJECTED'),
        ('4', 'CANCELLED'),
        ('5', 'SUBMITTED'),
        ('6', 'AVAILABLE'),
        ('7', 'COMPLETED'),
        ('8', 'REVERSED'),
        ('9', 'DECLINED'),
    )
    transaction_id = models.CharField(max_length=20, verbose_name='Related Transaction ID')
    quotation = models.OneToOneField(Quotation, max_length=50, verbose_name='Related Quotation Id',
                                     related_name='transaction', on_delete=models.CASCADE)
    # NOTE(review): `status` and `status_message` (and likewise the two
    # *_class fields) share the same choices and defaults; presumably the
    # *_message fields mirror the human-readable description -- confirm.
    status = models.CharField(max_length=50, blank=False,
                              default='10000', choices=TRANSACTION_STATUS, verbose_name='Transaction status code')
    status_message = models.CharField(max_length=50, blank=False, default='10000', choices=TRANSACTION_STATUS,
                                      verbose_name='Transaction status description')
    status_class = models.CharField(max_length=50, blank=False, default='1', choices=TRANSACTION_STATUS_CLASS,
                                    verbose_name='Transaction status class')
    status_class_message = models.CharField(max_length=50, blank=False, default='1', choices=TRANSACTION_STATUS_CLASS)
    external_id = models.CharField(max_length=50, blank=False, verbose_name='External ID')
    external_code = models.CharField(max_length=50, blank=False, verbose_name='External reference code')
    payer_transaction_reference = models.CharField(max_length=50, blank=True,
                                                   verbose_name='Payer transaction reference')
    payer_transaction_code = models.CharField(max_length=50, blank=True, verbose_name='Payer transaction code')
    creation_date = models.DateTimeField(max_length=50, auto_now=True, verbose_name='Creation date in HTTP format')
    expiration_date = models.DateTimeField(max_length=50, blank=True, null=True,
                                           verbose_name='Expiration date in HTTP format')
    sender = models.ForeignKey(Sender, related_name='senders', on_delete=models.CASCADE,
                               verbose_name='Sender information')
    beneficiary = models.ForeignKey(Beneficiary, related_name='beneficiaries',
                                    on_delete=models.CASCADE, verbose_name='Beneficiary information')
    callback_url = models.CharField(max_length=50, blank=False, verbose_name='Callback URL')
    # NOTE(review): max_digits == decimal_places (15/15) leaves zero digits
    # before the decimal point, so only values < 1 can be stored -- confirm
    # the intended precision for the three Decimal fields below.
    wholesale_fx_rate = models.DecimalField(max_digits=15, blank=True, null=True, decimal_places=15,
                                            verbose_name='Wholesale FX rate')
    retail_rate = models.DecimalField(max_digits=15, blank=True, null=True, decimal_places=15,
                                      verbose_name='Retail rate')
    retail_fee = models.DecimalField(max_digits=15, blank=True, null=True, decimal_places=15, verbose_name='Retail fee')
    retail_fee_currency = models.CharField(max_length=50, blank=False,
                                           verbose_name='Retail fee currency in ISO 4217 format')
    purpose_of_remittance = models.CharField(max_length=50, blank=False, verbose_name='Purpose of remittance')
    additional_information_1 = models.CharField(max_length=50, blank=True, verbose_name='Additional information')
    additional_information_2 = models.CharField(max_length=50, blank=True, verbose_name='Additional information')
    additional_information_3 = models.CharField(max_length=50, blank=True, verbose_name='Additional information')

    def __str__(self):
        return 'Transaction {}'.format(self.external_id)

    # NOTE(review): the two helpers below do not use `self`; they look like
    # queryset shortcuts that may belong on a custom manager instead.
    def getconfirmtranslist(self):
        # All transactions in status '20000' (CONFIRMED).
        return Transaction.objects.filter(status='20000')

    def getunconfirmtranslist(self):
        # All transactions still in status '10000' (CREATED).
        return Transaction.objects.filter(status='10000')

    class Meta:
        index_together = (('sender', 'beneficiary'),)
        verbose_name = 'transaction'
        db_table = 'transaction'
class CreditPartyIdentity(models.Model):
    """Identifiers of the party being credited (phone and/or bank account)."""

    msisdn = models.CharField(max_length=50, blank=False, verbose_name='MSISDN in international format')
    bank_account_number = models.CharField(max_length=50, blank=False, verbose_name='Bank account number')
    swift_bic_code = models.CharField(max_length=50, blank=False, verbose_name='SWIFT-BIC code')

    def __str__(self):
        return "Credit Party Identity {} {} {}".format(self.msisdn, self.bank_account_number, self.swift_bic_code)

    class Meta:
        db_table = 'Credit Party Identity'
        verbose_name = 'Credit Party Identity'
# class TransactionSenderBeneficiary(models.Model):
# sender = models.OneToOneField(Sender, verbose_name='sender entity in this transaction', on_delete=models.CASCADE)
# beneficiary = models.OneToOneField(Beneficiary, verbose_name='beneficiary entity in this transaction',
# on_delete=models.CASCADE)
|
PypiClean
|
/kabbes_aws_credentials-0.9.0.tar.gz/kabbes_aws_credentials-0.9.0/src/aws_credentials/AWS_Creds.py
|
from parent_class import ParentClass, ParentPluralDict
import py_starter as ps
class AWS_Cred(ParentClass):
    """One AWS role and its associated credentials.

    Attributes:
        role: name of the AWS role.
        dict: mapping of environment-variable name -> value.
        string: raw text form, one "key=value" per line, with the role on
            its own line as "[ROLE_NAME]".
    """

    BEGIN_ROLE = '['
    END_ROLE = ']'
    _IMP_ATTS = ['role', 'string', 'dict']
    _ONE_LINE_ATTS = ['type', 'role']

    def __init__(self, role=None, dict=None, string=''):
        """Initialize from either a dict (``role`` must then be provided) or a
        string (the role must be contained in [square brackets] in the string).

        ``dict`` keeps its historical name for backward compatibility even
        though it shadows the builtin.
        """
        ParentClass.__init__(self)
        self.role = role
        # `dict=None` sentinel avoids the shared mutable-default pitfall:
        # each instance gets its own dictionary unless one is provided.
        self.dict = {} if dict is None else dict
        self.string = string
        if self.string != '' and self.dict == {}:
            self._string_to_dict()

    def _string_to_dict(self):
        """Parse ``self.string`` into ``self.dict`` (setting ``self.role``).

        Returns:
            The parsed role dictionary.
        """
        role_dict = {}
        for line in self.string.split('\n'):
            line = line.strip()
            if line == '':
                continue  # skip blank lines
            elif self.line_is_role(line):
                self.role = self.get_role_from_line(line)
            elif self.role is not None:
                # Only collect key/value pairs once a role header was seen.
                key, value = self.get_key_value_from_line(line)
                role_dict[key] = value
        self.dict = role_dict
        return role_dict

    def get_role_from_line(self, line):
        """Given '[AWS_ROLE-1234]', return 'AWS_ROLE-1234'; None otherwise."""
        if self.line_is_role(line):
            return line[len(self.BEGIN_ROLE): -1 * len(self.END_ROLE)]
        return None

    def line_is_role(self, line):
        """Return True if ``line`` is a role header such as '[AWS_ROLE-1234]'."""
        # startswith/endswith also handles empty lines and multi-character
        # markers, unlike indexing line[0]/line[-1] which raises on ''.
        return line.startswith(self.BEGIN_ROLE) and line.endswith(self.END_ROLE)

    def get_key_value_from_line(self, string):
        """Split ``string`` on the FIRST '=' and return (key, value).

        'aws_session_token=1234ASDF=B' returns ('aws_session_token', '1234ASDF=B');
        a line with no '=' yields value None.
        """
        key, sep, value = string.partition('=')
        return key, (value if sep else None)
class AWS_Creds(ParentPluralDict):
    """All known AWS roles and their respective credentials.

    Attributes:
        Creds: mapping of role name -> AWS_Cred instance (managed by
            ParentPluralDict via ``_add``).
        dict: serializable mapping of role name -> credential dict.
    """

    def __init__(self, load_from_json=True, dict=None):
        """Build the collection, optionally loading from the JSON store.

        Args:
            load_from_json: when True, populate ``self.dict`` from the
                configured access-keys JSON file before loading Creds.
            dict: optional pre-built mapping of role -> credential dict;
                the historical parameter name shadows the builtin and is
                kept for backward compatibility.
        """
        ParentPluralDict.__init__(self, att='Creds')
        # `dict=None` sentinel avoids the shared mutable-default pitfall.
        self.dict = {} if dict is None else dict
        if load_from_json:
            self._import_from_json()
        self._load_Creds()

    def _import_from_json(self):
        """Read the configured access-keys JSON file into ``self.dict``."""
        self.dict = self.cfg['access_keys.Path'].read_json_to_dict()

    def export(self):
        """Merge the in-memory Creds back into ``self.dict`` and persist."""
        for Cred in self:
            self.dict[Cred.role] = Cred.dict
        self._export_to_json()

    def _export_to_json(self):
        """Write ``self.dict`` to the configured access-keys JSON file."""
        self.cfg['access_keys.Path'].write(string=ps.dict_to_json(self.dict))

    def _load_Creds(self):
        """Instantiate an AWS_Cred for every role in ``self.dict``."""
        for role in self.dict:
            new_Cred = AWS_Cred(role=role, dict=self.dict[role])
            self.add_new_Cred(new_Cred)

    def add_new_Cred(self, new_Cred):
        """Add/overwrite the stored credentials for ``new_Cred.role``."""
        self._add(new_Cred.role, new_Cred)

    def get_Cred_from_role(self, Cred_role):
        """Return the AWS_Cred for ``Cred_role``, or None with a message."""
        if Cred_role in self.Creds:
            return self.Creds[Cred_role]
        print('Could not find role ' + str(Cred_role) + ' in AWS_Creds object')
        return None

    def update_from_clipboard(self):
        """Parse credentials from the clipboard, merge them in, and export.

        Expects the clipboard to contain "option 2" output from the AWS
        console. Returns True on success, False if no role was found.
        """
        new_Cred = AWS_Cred(string=ps.paste())
        new_Cred.print_atts()
        # Original used `not new_Cred.role != None`; this is the readable form.
        if new_Cred.role is None:
            print()
            print('------------')
            print('ERROR: Errant AWS selection. Did not find correct format')
            print('------------')
            print()
            return False
        self.add_new_Cred(new_Cred)
        self.print_atts()
        print('Writing merged credentials to ' + str(self.cfg['access_keys.Path']))
        self.export()
        print()
        print('------------')
        print('SUCCESS: AWS Credentials successfully updated')
        print('------------')
        print()
        return True
|
PypiClean
|
/proteofav-0.2.3.tar.gz/proteofav-0.2.3/docs/example_usage.rst
|
=============
Example Usage
=============
Example usage is currently provided as a Jupyter notebook, which can be viewed with `GitHub's`_ file viewer or with the Jupyter `nbviewer`_.
You can download the Jupyter notebook from `GitHub`_ and test it with your ProteoFAV's installation.
.. _GitHub's: https://github.com/bartongroup/ProteoFAV/blob/master/Examples.ipynb
.. _nbviewer: https://nbviewer.jupyter.org/github/bartongroup/ProteoFAV/blob/master/Examples.ipynb
.. _GitHub: https://github.com/bartongroup/ProteoFAV
|
PypiClean
|
/lightautoml-gpu-1.0.0.tar.gz/lightautoml-gpu-1.0.0/lightautoml/validation/gpu/gpu_iterators.py
|
from typing import Optional
from typing import Tuple
from typing import Union
from typing import cast
import cupy as cp
from lightautoml.dataset.gpu.gpu_dataset import CupyDataset
from lightautoml.dataset.gpu.gpu_dataset import CudfDataset
from lightautoml.dataset.gpu.gpu_dataset import DaskCudfDataset
from lightautoml.validation.base import CustomIdxs
from lightautoml.validation.base import CustomIterator
from lightautoml.validation.base import DummyIterator
from lightautoml.validation.base import HoldoutIterator
from lightautoml.validation.base import TrainValidIterator
GpuDataset = Union[CupyDataset, CudfDataset, DaskCudfDataset]
class HoldoutIterator_gpu(HoldoutIterator):
    """Classic holdout iterator: one predefined train/valid split
    (GPU version, requires indexing support)."""

    def __init__(self, train: GpuDataset, valid: GpuDataset):
        """Store the predefined split.

        Args:
            train: Dataset of train data.
            valid: Dataset of valid data.
        """
        self.train = train
        self.valid = valid

    def __len__(self) -> Optional[int]:
        """A holdout iterator always contains exactly one split."""
        return 1

    def __iter__(self) -> 'HoldoutIterator_gpu':
        """Iterate over the single (indexes, train, valid) triple."""
        single_split = (None, self.train, self.valid)
        return iter([single_split])

    def __getitem__(self, number):
        """Indexed access to the split; only index 0 is valid."""
        if number > 0:
            raise IndexError('index out of range')
        return None, self.train, self.valid
class FoldsIterator_gpu(TrainValidIterator):
    """Classic cv iterator (GPU version).

    Folds should be defined in Reader, based on cross validation method.
    """

    def __init__(self, train: GpuDataset, n_folds: Optional[int] = None):
        """Creates iterator (GPU version).

        Args:
            train: Dataset for folding.
            n_folds: Number of folds.
        """
        assert hasattr(train, 'folds'), 'Folds in dataset should be defined to make folds iterator.'
        self.train = train
        max_folds = train.folds.max()
        # Dask-backed datasets return a lazy scalar; materialize it first.
        if type(train) == DaskCudfDataset:
            max_folds = max_folds.compute()
        self.n_folds = max_folds + 1
        if n_folds is not None:
            self.n_folds = min(self.n_folds, n_folds)

    def _split_fold(self, fold: int) -> Tuple[cp.ndarray, GpuDataset, GpuDataset]:
        """Build (val_idx, train_ds, valid_ds) for the given fold number.

        Shared by ``__getitem__``, ``__next__`` and
        ``convert_to_holdout_iterator`` (previously triplicated inline).
        """
        val_idx = (self.train.folds == fold)
        # Normalize the boolean mask to a cupy array for each backend.
        if type(self.train) == CudfDataset:
            val_idx = val_idx.values
        elif type(self.train) == DaskCudfDataset:
            val_idx = val_idx.compute().values
        tr_idx = cp.logical_not(val_idx)
        idx = cp.arange(self.train.shape[0])
        tr_idx, val_idx = idx[tr_idx], idx[val_idx]
        # Dask-backed datasets are indexed with host (numpy) arrays.
        if type(self.train) == DaskCudfDataset:
            tr_idx = tr_idx.get()
            val_idx = val_idx.get()
        train, valid = self.train[tr_idx], self.train[val_idx]
        return val_idx, cast(GpuDataset, train), cast(GpuDataset, valid)

    def __len__(self) -> int:
        """Get len of iterator.

        Returns:
            Number of folds.
        """
        return self.n_folds

    def __iter__(self) -> 'FoldsIterator_gpu':
        """Set counter to 0 and return self.

        Returns:
            Iterator for folds.
        """
        self._curr_idx = 0
        return self

    def __getitem__(self, number):
        """Get the split for fold ``number``."""
        if number >= self.n_folds:
            raise IndexError('index out of range')
        return self._split_fold(number)

    def __next__(self) -> Tuple[cp.ndarray, GpuDataset, GpuDataset]:
        """Define how to get next object.

        Returns:
            Mask for current fold, train dataset, validation dataset.
        """
        if self._curr_idx == self.n_folds:
            raise StopIteration
        val_idx, train, valid = self._split_fold(self._curr_idx)
        self._curr_idx += 1
        return val_idx, train, valid

    def get_validation_data(self) -> GpuDataset:
        """Just return train dataset.

        Returns:
            Whole train dataset.
        """
        return self.train

    def convert_to_holdout_iterator(self) -> HoldoutIterator_gpu:
        """Convert iterator to hold-out-iterator.

        Fold 0 is used for validation, everything else is used for training.

        Returns:
            new hold-out-iterator.
        """
        _, train, valid = self._split_fold(0)
        return HoldoutIterator_gpu(train, valid)
def get_gpu_iterator(
        train: GpuDataset,
        valid: Optional[GpuDataset] = None,
        n_folds: Optional[int] = None,
        iterator: Optional[CustomIdxs] = None
) -> Union[FoldsIterator_gpu, HoldoutIterator_gpu, HoldoutIterator, CustomIterator, DummyIterator]:
    """Get iterator for gpu dataset.

    Precedence: an explicit ``valid`` dataset wins; otherwise a custom
    ``iterator``; otherwise fold indexes defined on ``train`` (capped at
    ``n_folds``); otherwise a ``DummyIterator`` validating on train itself.

    Args:
        train: ``LAMLDataset`` to train.
        valid: Optional ``LAMLDataset`` for validate.
        n_folds: maximum number of folds to iterate.
            If ``None`` - iterate through all folds.
        iterator: Takes dataset as input and return an iterator
            of indexes of train/valid for train dataset.

    Returns:
        new train-validation iterator.
    """
    if valid is not None:
        return HoldoutIterator(train, valid)
    if iterator is not None:
        return CustomIterator(train, iterator)
    if train.folds is not None:
        return FoldsIterator_gpu(train, n_folds)
    return DummyIterator(train)
|
PypiClean
|
/mara-mondrian-2.0.2.tar.gz/mara-mondrian-2.0.2/README.md
|
# Mara Mondrian
[](https://travis-ci.org/mara/mara-mondrian)
[](https://github.com/mara/mara-mondrian/blob/master/LICENSE)
[](https://badge.fury.io/py/mara-mondrian)
[](https://communityinviter.com/apps/mara-users/public-invite)
A python interface for [Mondrian Server](https://github.com/project-a/mondrian-server), a [Mondrian](https://en.wikipedia.org/wiki/Mondrian_OLAP_server) [XMLA](https://en.wikipedia.org/wiki/XML_for_Analysis) server combined with the [Saiku](https://www.meteorite.bi/products/saiku/) ad hoc analysis tool. Comes with
- A Makefile for running Mondrian Server locally.
- [Mondrian schema](https://mondrian.pentaho.com/documentation/schema.php) generation from a [Mara Schema](https://github.com/mara/mara-schema) definition.
- Mondrian cache flushing.
- Saiku authentication via [Mara ACL](https://github.com/mara/mara-acl).
## Installation
To use the library directly, use pip:
```
pip install mara-mondrian
```
or
```
pip install git+https://github.com/mara/mara-mondrian.git
```
For an example of an integration into a flask application, have a look at the [mara example project 1](https://github.com/mara/mara-example-project-1).
## Running Saiku
From within a project, include [.scripts/mondrian-server.mk](https://github.com/mara/mara-mondrian/tree/master/.scripts/mondrian-server.mk) in your project Makefile (as for example in [https://github.com/mara/mara-example-project-1/blob/master/Makefile](https://github.com/mara/mara-example-project-1/blob/master/Makefile)).
Running `make setup-mondrian-server` will create the required `mondrian-server.properties` file. And then running `make run-mondrian-server` will start Saiku and the XMLA server on port 8080:

For running Mondrian Server in production, please have a look at [https://github.com/project-a/mondrian-server/blob/master/README.md](https://github.com/project-a/mondrian-server/blob/master/README.md).
## Features
### Mondrian schema generation
If you have a data warehouse schema defined in [Mara Schema](https://github.com/mara/mara-schema), then you can automatically create a Mondrian schema file using the function `write_mondrian_schema` in [mara_mondrian/schema_generation.py](mara_mondrian/schema_generation.py).
Have a look at [https://github.com/mara/mara-example-project-1/blob/master/app/pipelines/update_frontends/\_\_init\_\_.py](https://github.com/mara/mara-example-project-1/blob/master/app/pipelines/update_frontends/__init__.py) for an example.
### Mondrian cache flushing
The function `flush_mondrian_cache` in [mara_mondrian/connection.py](https://github.com/mara/mara-mondrian/tree/master/mara_mondrian/connection.py) triggers a reload of the schema and a flushing of all caches in Mondrian Server.
This file also contains functions for making XMLA requests.
### Saiku authentication via [Mara ACL](https://github.com/mara/mara-acl)
Once you add the Saiku ACL resource in [mara_mondrian/views.py](https://github.com/mara/mara-mondrian/tree/master/mara_mondrian/views.py) to your project, you can easily control which users can query which cubes:

In this example, users from the "Management" group can query all cubes, and users from "Marketing" only "Customers" and "Leads" (with the exception of Thomas who can also query "Order items" and "Sellers").
Please have a look at [https://github.com/project-a/mondrian-server/blob/master/README.md](https://github.com/project-a/mondrian-server/blob/master/README.md) for how to set this up.
Please make sure that the `/mondrian/saiku/authorize` endpoint is white-listed from the Mara ACL, as for example in [https://github.com/mara/mara-example-project-1/blob/master/app/ui/\_\_init\_\_.py](https://github.com/mara/mara-example-project-1/blob/master/app/ui/__init__.py):
```python
monkey_patch.patch(mara_acl.config.whitelisted_uris)(lambda: ['/mara-app/navigation-bar', '/mondrian/saiku/authorize'])
```
The easiest way to try out Mara Mondrian is to run the [mara example project 1](https://github.com/mara/mara-example-project-1).
|
PypiClean
|
/wheeler.demo_rpc-0.5.tar.gz/wheeler.demo_rpc-0.5/demo_rpc/config.py
|
# Machine-generated protocol buffer code (protoc output for config.proto).
# Do not edit by hand: regenerate from the .proto definition instead.
import sys
# Py2/Py3 shim: on Py3, re-encode the literal back to bytes via latin1.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

# Serialized FileDescriptorProto for config.proto (messages: Config, State).
DESCRIPTOR = _descriptor.FileDescriptor(
  name='config.proto',
  package='',
  syntax='proto2',
  serialized_pb=_b('\n\x0c\x63onfig.proto\"O\n\x06\x43onfig\x12\x15\n\rserial_number\x18\x01 \x01(\r\x12\x19\n\tbaud_rate\x18\x02 \x01(\r:\x06\x31\x31\x35\x32\x30\x30\x12\x13\n\x0bi2c_address\x18\x03 \x01(\r\"B\n\x05State\x12\x1e\n\x0b\x66loat_value\x18\x01 \x01(\x02:\t10.987654\x12\x19\n\rinteger_value\x18\x02 \x01(\x05:\x02\x33\x37')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Descriptor for the Config message (serial_number, baud_rate, i2c_address).
_CONFIG = _descriptor.Descriptor(
  name='Config',
  full_name='Config',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='serial_number', full_name='Config.serial_number', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='baud_rate', full_name='Config.baud_rate', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=115200,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='i2c_address', full_name='Config.i2c_address', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16,
  serialized_end=95,
)

# Descriptor for the State message (float_value, integer_value).
_STATE = _descriptor.Descriptor(
  name='State',
  full_name='State',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='float_value', full_name='State.float_value', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=10.987654,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='integer_value', full_name='State.integer_value', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=37,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=97,
  serialized_end=163,
)

DESCRIPTOR.message_types_by_name['Config'] = _CONFIG
DESCRIPTOR.message_types_by_name['State'] = _STATE

# Generated message classes built from the descriptors above.
Config = _reflection.GeneratedProtocolMessageType('Config', (_message.Message,), dict(
  DESCRIPTOR = _CONFIG,
  __module__ = 'config_pb2'
  # @@protoc_insertion_point(class_scope:Config)
  ))
_sym_db.RegisterMessage(Config)

State = _reflection.GeneratedProtocolMessageType('State', (_message.Message,), dict(
  DESCRIPTOR = _STATE,
  __module__ = 'config_pb2'
  # @@protoc_insertion_point(class_scope:State)
  ))
_sym_db.RegisterMessage(State)

# @@protoc_insertion_point(module_scope)
|
PypiClean
|
/code_comment-0.1.0.tar.gz/code_comment-0.1.0/code_comment/lib.py
|
import os.path
from code_comment.models import Comment
from code_comment.errors import CodeLanguageUnsupported
class CodeLanguage:
    """Registry of supported language identifiers, with a factory that maps
    an identifier to its CodeLanguage subclass."""

    PYTHON = 'python'
    PHP = 'php'
    JAVASCRIPT = 'javascript'
    GOLANG = 'go'

    @staticmethod
    def factory(code_name):
        """Return the CodeLanguage subclass for ``code_name``.

        Raises:
            CodeLanguageUnsupported: if ``code_name`` is not recognized.
        """
        languages = {
            CodeLanguage.PYTHON: PythonCodeLanguage,
            CodeLanguage.PHP: PHPCodeLanguage,
            CodeLanguage.JAVASCRIPT: JavascriptCodeLanguage,
            CodeLanguage.GOLANG: GolangCodeLanguage,
        }
        try:
            return languages[code_name]
        except KeyError:
            raise CodeLanguageUnsupported
class BaseCodeLanguage(CodeLanguage):
    """Default comment markers shared by C-style languages."""

    # header, footer prefixes/suffixes; a None footer means the comment
    # runs to end of line.
    SINGLE_LINE_COMMENT = ('//', None)
    # header, middle, footer prefixes/suffixes; a None middle means
    # continuation lines need no required prefix.
    MULTI_LINE_COMMENT = ('/*', None, '*/')
class JavascriptCodeLanguage(BaseCodeLanguage):
    """JavaScript uses the default C-style comment markers unchanged."""
    pass
class GolangCodeLanguage(BaseCodeLanguage):
    """Go uses the default C-style comment markers unchanged."""
    pass
class PHPCodeLanguage(BaseCodeLanguage):
    """PHP comment markers."""

    # NOTE: assuming PHPDoc style -- continuation lines start with '*'.
    MULTI_LINE_COMMENT = ('/**', '*', '*/')
class PythonCodeLanguage(CodeLanguage):
    """Python: '#' single-line comments; triple-quoted string blocks are
    treated as multi-line comments."""
    SINGLE_LINE_COMMENT = ('#', None)
    MULTI_LINE_COMMENT = ('"""', None, '"""')
class Parser:
    """Iterates over a source file and yields :class:`Comment` objects.

    Parsing is line-based: each line is stripped of surrounding whitespace
    and classified as a single-line comment, the start/middle/end of a
    multi-line comment, or code (ignored).
    """

    # maps a file extension (without the dot) to the CodeLanguage identifier
    # used to look up the language's comment delimiters
    SUPPORTED_CODE_FILE_EXTENSIONS = {
        'py': CodeLanguage.PYTHON,
        'php': CodeLanguage.PHP,
        'js': CodeLanguage.JAVASCRIPT,
        'go': CodeLanguage.GOLANG
    }

    @staticmethod
    def is_supported_code_extension(ext):
        """Return True if *ext* (without the dot) is a supported extension."""
        if not ext:
            return False
        return ext in Parser.SUPPORTED_CODE_FILE_EXTENSIONS

    @staticmethod
    def is_code_file(path):
        """Return True if *path* is an existing file with a supported extension."""
        if not os.path.isfile(path):
            return False
        return Parser.is_supported_code_extension(
            os.path.splitext(path)[1][1:]  # ignore '.'
        )

    def __init__(self, filepath):
        self.filepath = filepath
        if not self.is_code_file(self.filepath):
            raise CodeLanguageUnsupported
        self.code_language = CodeLanguage.factory(
            self.determine_code_language()
        )

    def determine_code_language(self):
        """Map this file's extension to a CodeLanguage identifier (or None)."""
        ext = os.path.splitext(self.filepath)[1][1:]
        return self.SUPPORTED_CODE_FILE_EXTENSIONS.get(ext)

    def __iter__(self):
        return self.parse()

    def parse(self):
        """Generator yielding every Comment found in the file, in order."""
        c = self.code_language
        slc_header, slc_footer = c.SINGLE_LINE_COMMENT
        mlc_header, mlc_middle, mlc_footer = c.MULTI_LINE_COMMENT
        # holds [text, line_number] pairs of the multi-line comment currently
        # being accumulated; empty if the parser is not inside one
        tmp = []

        def is_currently_multi_line_comment():
            return bool(tmp)

        def is_single_line_comment(text):
            return (
                not is_currently_multi_line_comment()
                and text.startswith(slc_header)
                and not slc_footer
            )

        def is_single_line_comment_multiline_notation(text):
            # e.g. "/* one-liner */": multi-line delimiters on a single line
            return (
                not is_currently_multi_line_comment()
                and text.startswith(mlc_header)
                and text.endswith(mlc_footer)
            )

        def is_multi_line_comment_start(text):
            return (
                not is_currently_multi_line_comment()
                and text.startswith(mlc_header)
                and not text.endswith(mlc_footer)
            )

        def is_multi_line_comment_midst(text):
            # a continuation line; when the language requires a middle marker
            # (e.g. PHPDoc '*'), the line must start with it
            return (
                is_currently_multi_line_comment()
                and not text.startswith(mlc_header)
                and not text.endswith(mlc_footer)
                and (not mlc_middle or text.startswith(mlc_middle))
            )

        def is_multi_line_comment_end(text):
            return (
                is_currently_multi_line_comment()
                and text.endswith(mlc_footer)
            )

        with open(self.filepath, 'r') as f:
            for line_number, text in enumerate(
                [l.strip() for l in f], start=1
            ):
                if not text:
                    continue
                if is_single_line_comment(text):
                    comment_text = text.split(slc_header)[1].strip()
                    yield Comment(comment_text, self.filepath, line_number)
                elif is_single_line_comment_multiline_notation(text):
                    comment_text = text.split(mlc_header)[1]
                    comment_text = comment_text.rsplit(mlc_footer)[0].strip()
                    yield Comment(comment_text, self.filepath, line_number)
                elif is_multi_line_comment_start(text):
                    comment_text = text.split(mlc_header)[1].strip()
                    tmp.append([comment_text, line_number])
                elif is_multi_line_comment_midst(text):
                    comment_text = text
                    if mlc_middle:
                        comment_text = text.split(mlc_middle)[1].strip()
                    tmp.append([comment_text, line_number])
                elif is_multi_line_comment_end(text):
                    comment_text = text.rsplit(mlc_footer)[0].strip()
                    tmp.append([comment_text, line_number])
                    # flush the accumulated block as a single multi-line
                    # Comment spanning [first_line, last_line]
                    comment_texts, line_numbers = zip(*tmp)
                    tmp = []
                    yield Comment(
                        list(comment_texts),
                        self.filepath,
                        [line_numbers[0], line_numbers[-1]],
                        is_multiline=True
                    )
|
PypiClean
|
/simuPOP-1.1.12.tar.gz/simuPOP-1.1.12/src/gsl.py
|
# SWIG-generated bootstrap: locate and import the compiled '_gsl' extension
# in a way that works across Python versions.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info >= (2, 7, 0):
    # Python >= 2.7: use importlib, preferring a package-relative '_gsl'
    # module and falling back to a top-level one.
    def swig_import_helper():
        import importlib
        pkg = __name__.rpartition('.')[0]
        mname = '.'.join((pkg, '_gsl')).lstrip('.')
        try:
            return importlib.import_module(mname)
        except ImportError:
            return importlib.import_module('_gsl')
    _gsl = swig_import_helper()
    del swig_import_helper
elif _swig_python_version_info >= (2, 6, 0):
    # Python 2.6: fall back to the (long since deprecated) imp machinery.
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_gsl', [dirname(__file__)])
        except ImportError:
            import _gsl
            return _gsl
        try:
            _mod = imp.load_module('_gsl', fp, pathname, description)
        finally:
            # find_module opens the file; always close it
            if fp is not None:
                fp.close()
        return _mod
    _gsl = swig_import_helper()
    del swig_import_helper
else:
    import _gsl
del _swig_python_version_info

try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.

# 'builtins' on Python 3, '__builtin__' on Python 2.
try:
    import builtins as __builtin__
except ImportError:
    import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    """Non-static attribute setter: unknown names are added to the instance."""
    return _swig_setattr_nondynamic(self, class_type, name, value, static=0)
def _swig_getattr(self, class_type, name):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Probe for new-style class support (Python >= 2.2); on ancient interpreters
# fall back to a dummy base class and record the capability in _newclass.
try:
    _object = object
    _newclass = 1
except __builtin__.Exception:
    class _object:
        pass
    _newclass = 0
# ---------------------------------------------------------------------------
# Thin SWIG-generated wrappers around the compiled GSL extension.  Each
# Python ``def`` is immediately replaced by the raw C function of the same
# name, so the defs only document the signatures (annotations mirror the C
# parameter types).
# ---------------------------------------------------------------------------

def my_error_handler(reason: 'char const *', file: 'char const *', line: 'int', gsl_errno: 'int') -> "void":
    return _gsl.my_error_handler(reason, file, line, gsl_errno)
my_error_handler = _gsl.my_error_handler

def initialize() -> "int":
    return _gsl.initialize()
initialize = _gsl.initialize

# --- probability density functions -----------------------------------------

def gsl_ran_gamma_pdf(x: 'double const', a: 'double const', b: 'double const') -> "double":
    return _gsl.gsl_ran_gamma_pdf(x, a, b)
gsl_ran_gamma_pdf = _gsl.gsl_ran_gamma_pdf

def gsl_log1p(x: 'double const') -> "double":
    return _gsl.gsl_log1p(x)
gsl_log1p = _gsl.gsl_log1p

def gsl_ran_binomial_pdf(k: 'unsigned int const', p: 'double const', n: 'unsigned int const') -> "double":
    return _gsl.gsl_ran_binomial_pdf(k, p, n)
gsl_ran_binomial_pdf = _gsl.gsl_ran_binomial_pdf

def gsl_ran_beta_pdf(x: 'double const', a: 'double const', b: 'double const') -> "double":
    return _gsl.gsl_ran_beta_pdf(x, a, b)
gsl_ran_beta_pdf = _gsl.gsl_ran_beta_pdf

def gsl_ran_poisson_pdf(k: 'unsigned int const', mu: 'double const') -> "double":
    return _gsl.gsl_ran_poisson_pdf(k, mu)
gsl_ran_poisson_pdf = _gsl.gsl_ran_poisson_pdf

# --- cumulative distributions: P = lower tail, Q = upper tail, *inv inverse -

def gsl_cdf_gaussian_P(x: 'double', sigma: 'double') -> "double":
    return _gsl.gsl_cdf_gaussian_P(x, sigma)
gsl_cdf_gaussian_P = _gsl.gsl_cdf_gaussian_P

def gsl_cdf_gaussian_Q(x: 'double', sigma: 'double') -> "double":
    return _gsl.gsl_cdf_gaussian_Q(x, sigma)
gsl_cdf_gaussian_Q = _gsl.gsl_cdf_gaussian_Q

def gsl_cdf_gaussian_Pinv(P: 'double', sigma: 'double') -> "double":
    return _gsl.gsl_cdf_gaussian_Pinv(P, sigma)
gsl_cdf_gaussian_Pinv = _gsl.gsl_cdf_gaussian_Pinv

def gsl_cdf_gaussian_Qinv(Q: 'double', sigma: 'double') -> "double":
    return _gsl.gsl_cdf_gaussian_Qinv(Q, sigma)
gsl_cdf_gaussian_Qinv = _gsl.gsl_cdf_gaussian_Qinv

def gsl_cdf_ugaussian_P(x: 'double') -> "double":
    return _gsl.gsl_cdf_ugaussian_P(x)
gsl_cdf_ugaussian_P = _gsl.gsl_cdf_ugaussian_P

def gsl_cdf_ugaussian_Q(x: 'double') -> "double":
    return _gsl.gsl_cdf_ugaussian_Q(x)
gsl_cdf_ugaussian_Q = _gsl.gsl_cdf_ugaussian_Q

def gsl_cdf_ugaussian_Pinv(P: 'double') -> "double":
    return _gsl.gsl_cdf_ugaussian_Pinv(P)
gsl_cdf_ugaussian_Pinv = _gsl.gsl_cdf_ugaussian_Pinv

def gsl_cdf_ugaussian_Qinv(Q: 'double') -> "double":
    return _gsl.gsl_cdf_ugaussian_Qinv(Q)
gsl_cdf_ugaussian_Qinv = _gsl.gsl_cdf_ugaussian_Qinv

def gsl_cdf_exponential_P(x: 'double', mu: 'double') -> "double":
    return _gsl.gsl_cdf_exponential_P(x, mu)
gsl_cdf_exponential_P = _gsl.gsl_cdf_exponential_P

def gsl_cdf_exponential_Q(x: 'double', mu: 'double') -> "double":
    return _gsl.gsl_cdf_exponential_Q(x, mu)
gsl_cdf_exponential_Q = _gsl.gsl_cdf_exponential_Q

def gsl_cdf_exponential_Pinv(P: 'double', mu: 'double') -> "double":
    return _gsl.gsl_cdf_exponential_Pinv(P, mu)
gsl_cdf_exponential_Pinv = _gsl.gsl_cdf_exponential_Pinv

def gsl_cdf_exponential_Qinv(Q: 'double', mu: 'double') -> "double":
    return _gsl.gsl_cdf_exponential_Qinv(Q, mu)
gsl_cdf_exponential_Qinv = _gsl.gsl_cdf_exponential_Qinv

def gsl_cdf_chisq_P(x: 'double', nu: 'double') -> "double":
    return _gsl.gsl_cdf_chisq_P(x, nu)
gsl_cdf_chisq_P = _gsl.gsl_cdf_chisq_P

def gsl_cdf_chisq_Q(x: 'double', nu: 'double') -> "double":
    return _gsl.gsl_cdf_chisq_Q(x, nu)
gsl_cdf_chisq_Q = _gsl.gsl_cdf_chisq_Q

def gsl_cdf_chisq_Pinv(P: 'double', nu: 'double') -> "double":
    return _gsl.gsl_cdf_chisq_Pinv(P, nu)
gsl_cdf_chisq_Pinv = _gsl.gsl_cdf_chisq_Pinv

def gsl_cdf_chisq_Qinv(Q: 'double', nu: 'double') -> "double":
    return _gsl.gsl_cdf_chisq_Qinv(Q, nu)
gsl_cdf_chisq_Qinv = _gsl.gsl_cdf_chisq_Qinv

def gsl_cdf_gamma_P(x: 'double', a: 'double', b: 'double') -> "double":
    return _gsl.gsl_cdf_gamma_P(x, a, b)
gsl_cdf_gamma_P = _gsl.gsl_cdf_gamma_P

def gsl_cdf_gamma_Q(x: 'double', a: 'double', b: 'double') -> "double":
    return _gsl.gsl_cdf_gamma_Q(x, a, b)
gsl_cdf_gamma_Q = _gsl.gsl_cdf_gamma_Q

def gsl_cdf_gamma_Pinv(P: 'double', a: 'double', b: 'double') -> "double":
    return _gsl.gsl_cdf_gamma_Pinv(P, a, b)
gsl_cdf_gamma_Pinv = _gsl.gsl_cdf_gamma_Pinv

def gsl_cdf_gamma_Qinv(Q: 'double', a: 'double', b: 'double') -> "double":
    return _gsl.gsl_cdf_gamma_Qinv(Q, a, b)
gsl_cdf_gamma_Qinv = _gsl.gsl_cdf_gamma_Qinv

def gsl_cdf_binomial_P(k: 'unsigned int', p: 'double', n: 'unsigned int') -> "double":
    return _gsl.gsl_cdf_binomial_P(k, p, n)
gsl_cdf_binomial_P = _gsl.gsl_cdf_binomial_P

def gsl_cdf_binomial_Q(k: 'unsigned int', p: 'double', n: 'unsigned int') -> "double":
    return _gsl.gsl_cdf_binomial_Q(k, p, n)
gsl_cdf_binomial_Q = _gsl.gsl_cdf_binomial_Q

def gsl_cdf_beta_P(x: 'double', a: 'double', b: 'double') -> "double":
    return _gsl.gsl_cdf_beta_P(x, a, b)
gsl_cdf_beta_P = _gsl.gsl_cdf_beta_P

def gsl_cdf_beta_Q(x: 'double', a: 'double', b: 'double') -> "double":
    return _gsl.gsl_cdf_beta_Q(x, a, b)
gsl_cdf_beta_Q = _gsl.gsl_cdf_beta_Q

def gsl_cdf_beta_Pinv(P: 'double', a: 'double', b: 'double') -> "double":
    return _gsl.gsl_cdf_beta_Pinv(P, a, b)
gsl_cdf_beta_Pinv = _gsl.gsl_cdf_beta_Pinv

def gsl_cdf_beta_Qinv(Q: 'double', a: 'double', b: 'double') -> "double":
    return _gsl.gsl_cdf_beta_Qinv(Q, a, b)
gsl_cdf_beta_Qinv = _gsl.gsl_cdf_beta_Qinv

def gsl_cdf_poisson_P(k: 'unsigned int const', mu: 'double const') -> "double":
    return _gsl.gsl_cdf_poisson_P(k, mu)
gsl_cdf_poisson_P = _gsl.gsl_cdf_poisson_P

def gsl_cdf_poisson_Q(k: 'unsigned int const', mu: 'double const') -> "double":
    return _gsl.gsl_cdf_poisson_Q(k, mu)
gsl_cdf_poisson_Q = _gsl.gsl_cdf_poisson_Q

# This file is compatible with both classic and new-style classes.
|
PypiClean
|
/Prestige-0.0.1.tar.gz/Prestige-0.0.1/pylot/component/static/pylot/vendor/mdeditor/bower_components/codemirror/mode/turtle/turtle.js
|
CodeMirror.defineMode("turtle", function(config) {
  var indentUnit = config.indentUnit;
  // Punctuation character consumed by the last tokenBase call; used by the
  // token() wrapper below to maintain the bracket/pattern context stack.
  var curPunc;

  function wordRegexp(words) {
    return new RegExp("^(?:" + words.join("|") + ")$", "i");
  }
  var keywords = wordRegexp(["@prefix", "@base", "a"]);
  var operatorChars = /[*+\-<>=&|]/;

  function tokenBase(stream, state) {
    var ch = stream.next();
    curPunc = null;
    if (ch == "<" && !stream.match(/^[\s\u00a0=]/, false)) {
      // IRI reference, e.g. <http://example.org/x>
      stream.match(/^[^\s\u00a0>]*>?/);
      return "atom";
    }
    else if (ch == "\"" || ch == "'") {
      state.tokenize = tokenLiteral(ch);
      return state.tokenize(stream, state);
    }
    else if (/[{}\(\),\.;\[\]]/.test(ch)) {
      curPunc = ch;
      return null;
    }
    else if (ch == "#") {
      stream.skipToEnd();
      return "comment";
    }
    else if (operatorChars.test(ch)) {
      stream.eatWhile(operatorChars);
      return null;
    }
    else if (ch == ":") {
      return "operator";
    } else {
      stream.eatWhile(/[_\w\d]/);
      if (stream.peek() == ":") {
        // prefix part of a prefixed name, e.g. "foaf" in foaf:name
        return "variable-3";
      } else {
        var word = stream.current();
        if (keywords.test(word)) {
          return "meta";
        }
        if (ch >= "A" && ch <= "Z") {
          return "comment";
        } else {
          return "keyword";
        }
      }
      // NOTE: an unreachable ops/keywords fallback used to follow here
      // (every branch above returns); it has been removed along with the
      // unused `ops = wordRegexp([])` it referenced.
    }
  }

  // Returns a tokenizer that consumes a string literal until the matching
  // unescaped quote, then restores tokenBase.
  function tokenLiteral(quote) {
    return function(stream, state) {
      var escaped = false, ch;
      while ((ch = stream.next()) != null) {
        if (ch == quote && !escaped) {
          state.tokenize = tokenBase;
          break;
        }
        escaped = !escaped && ch == "\\";
      }
      return "string";
    };
  }

  function pushContext(state, type, col) {
    state.context = {prev: state.context, indent: state.indent, col: col, type: type};
  }
  function popContext(state) {
    state.indent = state.context.indent;
    state.context = state.context.prev;
  }

  return {
    startState: function() {
      return {tokenize: tokenBase,
              context: null,
              indent: 0,
              col: 0};
    },
    token: function(stream, state) {
      if (stream.sol()) {
        if (state.context && state.context.align == null) state.context.align = false;
        state.indent = stream.indentation();
      }
      if (stream.eatSpace()) return null;
      var style = state.tokenize(stream, state);
      if (style != "comment" && state.context && state.context.align == null && state.context.type != "pattern") {
        state.context.align = true;
      }
      // maintain the context stack from the punctuation just consumed
      if (curPunc == "(") pushContext(state, ")", stream.column());
      else if (curPunc == "[") pushContext(state, "]", stream.column());
      else if (curPunc == "{") pushContext(state, "}", stream.column());
      else if (/[\]\}\)]/.test(curPunc)) {
        while (state.context && state.context.type == "pattern") popContext(state);
        if (state.context && curPunc == state.context.type) popContext(state);
      }
      else if (curPunc == "." && state.context && state.context.type == "pattern") popContext(state);
      else if (/atom|string|variable/.test(style) && state.context) {
        if (/[\}\]]/.test(state.context.type))
          pushContext(state, "pattern", stream.column());
        else if (state.context.type == "pattern" && !state.context.align) {
          state.context.align = true;
          state.context.col = stream.column();
        }
      }
      return style;
    },
    indent: function(state, textAfter) {
      var firstChar = textAfter && textAfter.charAt(0);
      var context = state.context;
      if (/[\]\}]/.test(firstChar))
        while (context && context.type == "pattern") context = context.prev;
      var closing = context && firstChar == context.type;
      if (!context)
        return 0;
      else if (context.type == "pattern")
        return context.col;
      else if (context.align)
        return context.col + (closing ? 0 : 1);
      else
        return context.indent + (closing ? 0 : indentUnit);
    }
  };
});
CodeMirror.defineMIME("text/turtle", "turtle");
|
PypiClean
|
/eko-0.13.5-py3-none-any.whl/ekomark/benchmark/external/apfel_utils.py
|
import time
import numpy as np
from banana.benchmark.external.apfel_utils import load_apfel
from eko import basis_rotation as br
def compute_apfel_data(
    theory, operators, pdf, skip_pdfs, rotate_to_evolution_basis=False
):
    """
    Run APFEL to compute operators.

    Parameters
    ----------
    theory : dict
        theory card
    operators : dict
        operators card
    pdf : lhapdf_type
        pdf
    skip_pdfs : list
        list of pdfs (pid or name) to skip
    rotate_to_evolution_basis: bool
        rotate to evolution basis

    Returns
    -------
    ref : dict
        output containing: target_xgrid, values
    """
    target_xgrid = operators["interpolation_xgrid"]
    pdf_name = pdf.set().name
    # Load apfel
    apf_start = time.perf_counter()
    # normalize the evolution-mode label to APFEL's three-letter codes
    # NOTE(review): this mutates the caller's theory dict in place
    if theory["ModEv"] in ["EXA", "perturbative-exact"]:
        theory["ModEv"] = "EXA"
    elif theory["ModEv"] in ["EXP", "decompose-expanded", "perturbative-expanded"]:
        theory["ModEv"] = "EXP"
    elif theory["ModEv"] in ["TRN", "ordered-truncated"]:
        theory["ModEv"] = "TRN"
    else:
        raise ValueError(f"Method {theory['ModEv']} is not recognized. ")
    apfel = load_apfel(theory, operators, pdf_name)
    # Truncated Epsilon
    # APFEL::SetEpsilonTruncation(1E-1);
    #
    # Set maximum scale
    # APFEL::SetQLimits(theory.Q0, theory.QM );
    #
    # if (theory.SIA)
    # {
    #   APFEL::SetPDFSet("kretzer");
    #   APFEL::SetTimeLikeEvolution(true);
    # }
    # Set APFEL interpolation grid
    #
    # apfel.SetNumberOfGrids(3)
    # apfel.SetGridParameters(1, 50, 3, 1e-5)
    # apfel.SetGridParameters(2, 50, 3, 2e-1)
    # apfel.SetGridParameters(3, 50, 3, 8e-1)
    # init evolution
    apfel.SetPolarizedEvolution(operators["polarized"])
    apfel.InitializeAPFEL()
    print(f"Loading APFEL took {(time.perf_counter() - apf_start)} s")
    # Run: evolve from Q0 to each scale in the mu grid
    apf_tabs = {}
    for mu in operators["mugrid"]:
        apfel.EvolveAPFEL(theory["Q0"], mu)
        print(f"Executing APFEL took {(time.perf_counter() - apf_start)} s")
        tab = {}
        for pid in br.flavor_basis_pids:
            if pid in skip_pdfs:
                continue
            # collect APFEL
            apf = []
            for x in target_xgrid:
                if pid != 22:
                    # APFEL uses 0 for the gluon instead of PDG id 21
                    xf = apfel.xPDF(pid if pid != 21 else 0, x)
                else:
                    xf = apfel.xgamma(x)
                # if pid == 4:
                #     print(pid,x,xf)
                apf.append(xf)
            tab[pid] = np.array(apf)
        # rotate if needed
        if rotate_to_evolution_basis:
            qed = theory["QED"] > 0
            if not qed:
                evol_basis = br.evol_basis
                rotate_flavor_to_evolution = br.rotate_flavor_to_evolution
            else:
                evol_basis = br.unified_evol_basis
                rotate_flavor_to_evolution = br.rotate_flavor_to_unified_evolution
            # skipped pids contribute zero columns to the rotation
            pdfs = np.array(
                [
                    tab[pid] if pid in tab else np.zeros(len(target_xgrid))
                    for pid in br.flavor_basis_pids
                ]
            )
            evol_pdf = rotate_flavor_to_evolution @ pdfs
            tab = dict(zip(evol_basis, evol_pdf))
        apf_tabs[mu**2] = tab
    ref = {
        "target_xgrid": target_xgrid,
        "values": apf_tabs,
    }
    return ref
|
PypiClean
|
/ohmeow-blurr-1.0.5.tar.gz/ohmeow-blurr-1.0.5/blurr/text/data/question_answering.py
|
__all__ = ['QAPreprocessor', 'QATextInput', 'QABatchTokenizeTransform']
# Cell
import ast
from functools import reduce
from datasets import Dataset
from fastcore.all import *
from fastai.data.block import DataBlock, CategoryBlock, ColReader, ColSplitter
from fastai.imports import *
from fastai.losses import CrossEntropyLossFlat
from fastai.torch_core import *
from fastai.torch_imports import *
from transformers import AutoModelForQuestionAnswering, PretrainedConfig, PreTrainedTokenizerBase, PreTrainedModel, logging
from .core import TextInput, BatchTokenizeTransform, Preprocessor, first_blurr_tfm
from ..utils import get_hf_objects
logging.set_verbosity_error()
# Cell
class QAPreprocessor(Preprocessor):
    """Prepares extractive question answering data.

    Tokenizes (question, context) pairs, optionally chunking long documents
    via ``return_overflowing_tokens``, and computes the answer's start/end
    *token* indices so downstream batches can be built without re-scanning
    the raw text.
    """

    def __init__(
        self,
        # A Hugging Face tokenizer
        hf_tokenizer: PreTrainedTokenizerBase,
        # The number of examples to process at a time
        batch_size: int = 1000,
        # The unique identifier in the dataset. If not specified and "return_overflowing_tokens": True, an "_id" attribute
        # will be added to your dataset with its value a unique, sequential integer, assigned to each record
        id_attr: Optional[str] = None,
        # The attribute in your dataset that contains the context (where the answer is included) (default: 'context')
        ctx_attr: str = "context",
        # The attribute in your dataset that contains the question being asked (default: 'question')
        qst_attr: str = "question",
        # The attribute in your dataset that contains the actual answer (default: 'answer_text')
        ans_attr: str = "answer_text",
        # The attribute in your dataset that contains the answer's start character index (default: 'ans_start_char_idx')
        ans_start_char_idx: str = "ans_start_char_idx",
        # The attribute in your dataset that contains the answer's end character index (default: 'ans_end_char_idx')
        ans_end_char_idx: str = "ans_end_char_idx",
        # The attribute that should be created if your are processing individual training and validation
        # datasets into a single dataset, and will indicate to which each example is associated
        is_valid_attr: Optional[str] = "is_valid",
        # Tokenization kwargs that will be applied with calling the tokenizer (default: {"return_overflowing_tokens": True})
        # (never mutated, so the shared default dict is safe)
        tok_kwargs: dict = {"return_overflowing_tokens": True},
    ):
        # these values are mandatory
        tok_kwargs = {**tok_kwargs, "return_offsets_mapping": True}

        # shift the question and context appropriately based on the tokenizer's padding strategy
        if hf_tokenizer.padding_side == "right":
            tok_kwargs["truncation"] = "only_second"
            text_attrs = [qst_attr, ctx_attr]
        else:
            tok_kwargs["truncation"] = "only_first"
            text_attrs = [ctx_attr, qst_attr]

        super().__init__(hf_tokenizer, batch_size, text_attr=text_attrs[0], text_pair_attr=text_attrs[1], tok_kwargs=tok_kwargs)
        store_attr()

    def process_df(self, training_df: pd.DataFrame, validation_df: Optional[pd.DataFrame] = None):
        """Return the processed DataFrame with start/end token indices added."""
        df = super().process_df(training_df, validation_df)

        # a unique Id for each example is required to properly score question answering results when chunking long
        # documents (e.g., return_overflowing_tokens=True)
        chunk_docs = self.tok_kwargs.get("return_overflowing_tokens", False)
        max_length = self.tok_kwargs.get("max_length", self.hf_tokenizer.model_max_length)
        if self.id_attr is None and chunk_docs:
            df.insert(0, "_id", range(len(df)))

        # process df in mini-batches
        # (DataFrame.append was removed in pandas 2.0; use pd.concat instead)
        final_df = pd.DataFrame()
        for g, batch_df in df.groupby(np.arange(len(df)) // self.batch_size):
            final_df = pd.concat([final_df, self._process_df_batch(batch_df, chunk_docs, max_length)])

        final_df.reset_index(drop=True, inplace=True)
        return final_df

    def process_hf_dataset(self, training_ds: Dataset, validation_ds: Optional[Dataset] = None):
        """Same as `process_df` but for Hugging Face `Dataset`s."""
        ds = super().process_hf_dataset(training_ds, validation_ds)
        return Dataset.from_pandas(self.process_df(pd.DataFrame(ds)))

    # ----- utility methods -----
    def _process_df_batch(self, batch_df, is_chunked, max_length):
        """Tokenize one mini-batch and locate the answer's token span."""
        batch_df.reset_index(drop=True, inplace=True)

        # grab our inputs
        inputs = self._tokenize_function(batch_df.to_dict(orient="list"))
        offset_mapping = inputs.pop("offset_mapping")
        sample_map = inputs.pop("overflow_to_sample_mapping", batch_df.index.tolist())

        proc_data = []
        for idx, offsets in enumerate(offset_mapping):
            example_idx = sample_map[idx]
            row = batch_df.iloc[example_idx]

            input_ids = inputs["input_ids"][idx]
            seq_ids = inputs.sequence_ids(idx)

            # get question and context associated with the inputs at "idx"
            qst_mask = [i != 1 if self.hf_tokenizer.padding_side == "right" else i != 0 for i in seq_ids]
            qst_offsets = [offsets[i] for i, is_qst in enumerate(qst_mask) if is_qst and seq_ids[i] is not None]
            ctx_offsets = [offsets[i] for i, is_qst in enumerate(qst_mask) if not is_qst and seq_ids[i] is not None]
            proc_qst = row[self.qst_attr][min(qst_offsets)[0] : max(qst_offsets)[1]]
            proc_ctx = row[self.ctx_attr][min(ctx_offsets)[0] : max(ctx_offsets)[1]]

            # if we are chunking long documents, we need to tokenize the chunked question, context in order to correctly assign
            # the start/end token indices, else we can just use the above since we are only looking at one example at a time
            if is_chunked:
                chunk_texts = (proc_qst, proc_ctx) if self.hf_tokenizer.padding_side == "right" else (proc_ctx, proc_qst)
                chunk_inputs = self.hf_tokenizer(chunk_texts[0], chunk_texts[1])
                chunk_input_ids = chunk_inputs["input_ids"]
                chunk_qst_mask = [i != 1 if self.hf_tokenizer.padding_side == "right" else i != 0 for i in chunk_inputs.sequence_ids()]
            else:
                chunk_input_ids, chunk_qst_mask = input_ids, qst_mask

            # lastly we iterate over the input tokens to see if we can find the answer tokens within (ignoring the input tokens
            # belonging to the "question" as we only want to find answers that exist in the "context")
            tok_input = self.hf_tokenizer.convert_ids_to_tokens(chunk_input_ids)
            tok_ans = self.hf_tokenizer.tokenize(str(row[self.ans_attr]))

            start_idx, end_idx = 0, 0
            # tok_idx (not idx) so the outer loop's index isn't shadowed
            for tok_idx, (tok, is_qst_tok) in enumerate(zip(tok_input, chunk_qst_mask)):
                try:
                    if is_qst_tok == False and tok == tok_ans[0] and tok_input[tok_idx : tok_idx + len(tok_ans)] == tok_ans:
                        # ensure we are within the max_length
                        last_idx = tok_idx + len(tok_ans)
                        if last_idx < max_length:
                            start_idx, end_idx = tok_idx, tok_idx + len(tok_ans)
                            break
                except IndexError:
                    # tok_ans can be empty (no/blank answer); treat as not found
                    pass

            # update the original example information with the processed question, context, start/end "token" indices, and
            # a boolean indicating whether the question is answerable
            overflow_row = row.copy()
            overflow_row[f"proc_{self.qst_attr}"] = proc_qst
            overflow_row[f"proc_{self.ctx_attr}"] = proc_ctx
            overflow_row["ans_start_token_idx"] = start_idx
            overflow_row["ans_end_token_idx"] = end_idx
            overflow_row["is_answerable"] = start_idx != 0 and end_idx != 0

            proc_data.append(overflow_row)

        return pd.DataFrame(proc_data)
# Cell
class QATextInput(TextInput):
    """Marker input type so `show_batch` can typedispatch on extractive QA batches."""
    pass
# Cell
class QABatchTokenizeTransform(BatchTokenizeTransform):
    """Batch-time tokenization transform for extractive question answering.

    Extends the base transform by adding `cls_index` / `p_mask` inputs and
    replacing the generic "labels" with `start_positions` / `end_positions`.
    """

    def __init__(
        self,
        # The abbreviation/name of your Hugging Face transformer architecture (e.g., bert, bart, etc..)
        hf_arch: str,
        # A specific configuration instance you want to use
        hf_config: PretrainedConfig,
        # A Hugging Face tokenizer
        hf_tokenizer: PreTrainedTokenizerBase,
        # A Hugging Face model
        hf_model: PreTrainedModel,
        # To control whether the "labels" are included in your inputs. If they are, the loss will be calculated in
        # the model's forward function and you can simply use `PreCalculatedLoss` as your `Learner`'s loss function to use it
        include_labels: bool = True,
        # The token ID that should be ignored when calculating the loss
        ignore_token_id=CrossEntropyLossFlat().ignore_index,
        # To control the length of the padding/truncation. It can be an integer or None,
        # in which case it will default to the maximum length the model can accept. If the model has no
        # specific maximum input length, truncation/padding to max_length is deactivated.
        # See [Everything you always wanted to know about padding and truncation](https://huggingface.co/transformers/preprocessing.html#everything-you-always-wanted-to-know-about-padding-and-truncation)
        max_length: int = None,
        # To control the `padding` applied to your `hf_tokenizer` during tokenization. If None, will default to
        # `False` or `'do_not_pad'.
        # See [Everything you always wanted to know about padding and truncation](https://huggingface.co/transformers/preprocessing.html#everything-you-always-wanted-to-know-about-padding-and-truncation)
        padding: Union[bool, str] = True,
        # To control `truncation` applied to your `hf_tokenizer` during tokenization. If None, will default to
        # `False` or `do_not_truncate`.
        # See [Everything you always wanted to know about padding and truncation](https://huggingface.co/transformers/preprocessing.html#everything-you-always-wanted-to-know-about-padding-and-truncation)
        truncation: Union[bool, str] = "only_second",
        # The `is_split_into_words` argument applied to your `hf_tokenizer` during tokenization. Set this to `True`
        # if your inputs are pre-tokenized (not numericalized)
        is_split_into_words: bool = False,
        # Any other keyword arguments you want included when using your `hf_tokenizer` to tokenize your inputs.
        tok_kwargs: dict = {},
        # Keyword arguments to apply to `BatchTokenizeTransform`
        **kwargs
    ):
        # "return_special_tokens_mask" and "return_offsets_mapping" are mandatory for extractive QA in blurr
        tok_kwargs = {**tok_kwargs, **{"return_special_tokens_mask": True, "return_offsets_mapping": True}}

        super().__init__(
            hf_arch,
            hf_config,
            hf_tokenizer,
            hf_model,
            include_labels=include_labels,
            ignore_token_id=ignore_token_id,
            max_length=max_length,
            padding=padding,
            truncation=truncation,
            is_split_into_words=is_split_into_words,
            tok_kwargs=tok_kwargs,
            **kwargs
        )

    def encodes(self, samples, return_batch_encoding=False):
        """Tokenize a batch and wire up the QA-specific model inputs."""
        updated_samples, batch_encoding = super().encodes(samples, return_batch_encoding=True)

        for idx, s in enumerate(updated_samples):
            # cls_index: location of CLS token (used by xlnet and xlm); is a list.index(value) for pytorch tensor's
            s[0]["cls_index"] = (s[0]["input_ids"] == self.hf_tokenizer.cls_token_id).nonzero()[0]
            # p_mask: mask with 1 for token than cannot be in the answer, else 0 (used by xlnet and xlm)
            s[0]["p_mask"] = s[0]["special_tokens_mask"]

            trgs = s[1:]
            if self.include_labels and len(trgs) > 0:
                s[0].pop("labels")  # this is added by base class, but is not needed for extractive QA
                s[0]["start_positions"] = trgs[0]
                s[0]["end_positions"] = trgs[1]

        if return_batch_encoding:
            # BUG FIX: previously returned the undefined name `inputs`, which
            # raised NameError whenever return_batch_encoding=True
            return updated_samples, batch_encoding

        return updated_samples
# Cell
@typedispatch
def show_batch(
    # This typedispatched `show_batch` will be called for `QATextInput` typed inputs
    x: QATextInput,
    # Your targets
    y,
    # Your raw inputs/targets
    samples,
    # Your `DataLoaders`. This is required so as to get at the Hugging Face objects for
    # decoding them into something understandable
    dataloaders,
    # Your `show_batch` context
    ctxs=None,
    # The maximum number of items to show
    max_n=6,
    # Any truncation your want applied to your decoded inputs
    trunc_at=None,
    # Any other keyword arguments you want applied to `show_batch`
    **kwargs
):
    """Render a batch of extractive QA samples as a DataFrame for inspection."""
    # grab our tokenizer
    tfm = first_blurr_tfm(dataloaders, tfms=[QABatchTokenizeTransform])
    hf_tokenizer = tfm.hf_tokenizer

    res = L()
    for sample, input_ids, start, end in zip(samples, x, *y):
        # decoded text, whether an answer span was found, its token span, and
        # the decoded answer itself
        txt = hf_tokenizer.decode(sample[0], skip_special_tokens=True)[:trunc_at]
        found = start.item() != 0 and end.item() != 0
        ans_text = hf_tokenizer.decode(input_ids[start:end], skip_special_tokens=True)
        res.append((txt, found, (start.item(), end.item()), ans_text))

    display_df(pd.DataFrame(res, columns=["text", "found", "start/end", "answer"])[:max_n])
    return ctxs
|
PypiClean
|
/TashriTelethon-1.24.0-py3-none-any.whl/telethon/extensions/html.py
|
import struct
from collections import deque
from html import escape
from html.parser import HTMLParser
from typing import Iterable, Optional, Tuple, List
from .. import helpers
from ..tl.types import (
MessageEntityBold, MessageEntityItalic, MessageEntityCode,
MessageEntityPre, MessageEntityEmail, MessageEntityUrl,
MessageEntityTextUrl, MessageEntityMentionName,
MessageEntityUnderline, MessageEntityStrike, MessageEntityBlockquote,
TypeMessageEntity
)
# Helpers from markdown.py
def _add_surrogate(text):
return ''.join(
''.join(chr(y) for y in struct.unpack('<HH', x.encode('utf-16le')))
if (0x10000 <= ord(x) <= 0x10FFFF) else x for x in text
)
def _del_surrogate(text):
return text.encode('utf-16', 'surrogatepass').decode('utf-16')
class HTMLToTelegramParser(HTMLParser):
    """HTMLParser that accumulates plain text plus Telegram MessageEntity's.

    Feed it HTML (already passed through ``_add_surrogate``); afterwards
    ``self.text`` holds the stripped text and ``self.entities`` the
    formatting entities, with offsets/lengths in UTF-16 code units.
    """

    def __init__(self):
        super().__init__()
        self.text = ''
        self.entities = []
        # maps an open tag name -> the entity currently being built for it
        self._building_entities = {}
        # stack of currently open tag names (index 0 = innermost)
        self._open_tags = deque()
        # per-open-tag metadata (currently only the href for <a> tags)
        self._open_tags_meta = deque()

    def handle_starttag(self, tag, attrs):
        self._open_tags.appendleft(tag)
        self._open_tags_meta.appendleft(None)
        attrs = dict(attrs)
        EntityType = None
        args = {}
        if tag == 'strong' or tag == 'b':
            EntityType = MessageEntityBold
        elif tag == 'em' or tag == 'i':
            EntityType = MessageEntityItalic
        elif tag == 'u':
            EntityType = MessageEntityUnderline
        elif tag == 'del' or tag == 's':
            EntityType = MessageEntityStrike
        elif tag == 'blockquote':
            EntityType = MessageEntityBlockquote
        elif tag == 'code':
            try:
                # If we're in the middle of a <pre> tag, this <code> tag is
                # probably intended for syntax highlighting.
                #
                # Syntax highlighting is set with
                #     <code class='language-...'>codeblock</code>
                # inside <pre> tags
                pre = self._building_entities['pre']
                try:
                    pre.language = attrs['class'][len('language-'):]
                except KeyError:
                    pass
            except KeyError:
                EntityType = MessageEntityCode
        elif tag == 'pre':
            EntityType = MessageEntityPre
            args['language'] = ''
        elif tag == 'a':
            try:
                url = attrs['href']
            except KeyError:
                # an <a> without href produces no entity at all
                return
            if url.startswith('mailto:'):
                url = url[len('mailto:'):]
                EntityType = MessageEntityEmail
            else:
                if self.get_starttag_text() == url:
                    EntityType = MessageEntityUrl
                else:
                    EntityType = MessageEntityTextUrl
                    args['url'] = url
                    url = None
            self._open_tags_meta.popleft()
            self._open_tags_meta.appendleft(url)
        if EntityType and tag not in self._building_entities:
            self._building_entities[tag] = EntityType(
                offset=len(self.text),
                # The length will be determined when closing the tag.
                length=0,
                **args)

    def handle_data(self, text):
        previous_tag = self._open_tags[0] if len(self._open_tags) > 0 else ''
        if previous_tag == 'a':
            # inside <a href=url>…</a> where the visible text equals the URL,
            # the stored meta replaces the data so the URL itself is kept
            url = self._open_tags_meta[0]
            if url:
                text = url
        # every entity still open grows by this text
        for tag, entity in self._building_entities.items():
            entity.length += len(text)
        self.text += text

    def handle_endtag(self, tag):
        try:
            self._open_tags.popleft()
            self._open_tags_meta.popleft()
        except IndexError:
            pass
        # finalize the entity built for this tag, if any
        entity = self._building_entities.pop(tag, None)
        if entity:
            self.entities.append(entity)
def parse(html: str) -> Tuple[str, List[TypeMessageEntity]]:
    """
    Parses the given HTML message and returns its stripped representation
    plus a list of the MessageEntity's that were found.

    :param html: the message with HTML to be parsed.
    :return: a tuple consisting of (clean message, [message entities]).
    """
    if not html:
        return html, []

    tg_parser = HTMLToTelegramParser()
    # work in UTF-16 code units so entity offsets match Telegram's
    tg_parser.feed(_add_surrogate(html))
    stripped = helpers.strip_text(tg_parser.text, tg_parser.entities)
    return _del_surrogate(stripped), tg_parser.entities
def unparse(text: str, entities: Iterable[TypeMessageEntity], _offset: int = 0,
            _length: Optional[int] = None) -> str:
    """
    Performs the reverse operation to .parse(), effectively returning HTML
    given a normal text and its MessageEntity's.

    :param text: the text to be reconverted into HTML.
    :param entities: the MessageEntity's applied to the text.
    :param _offset: internal; absolute offset of ``text`` within the
        original message when recursing into nested entities.
    :param _length: internal; length of ``text`` (in surrogate space)
        for the current recursion level.
    :return: a HTML representation of the combination of both inputs.
    """
    if not text:
        return text
    elif not entities:
        # No formatting applies; just escape the raw text.
        return escape(text)

    # Work in surrogate space, since entity offsets/lengths are measured
    # in UTF-16 code units (see the surrogate note further below).
    text = _add_surrogate(text)
    if _length is None:
        _length = len(text)
    html = []
    last_offset = 0
    for i, entity in enumerate(entities):
        if entity.offset >= _offset + _length:
            # This entity starts past the current slice; assumes the
            # remaining entities do too — TODO confirm entities are
            # ordered by offset.
            break
        relative_offset = entity.offset - _offset
        if relative_offset > last_offset:
            # Plain text between the previous entity and this one.
            html.append(escape(text[last_offset:relative_offset]))
        elif relative_offset < last_offset:
            # Overlaps a region already emitted; skip this entity.
            continue
        skip_entity = False
        length = entity.length
        # If we are in the middle of a surrogate nudge the position by +1.
        # Otherwise we would end up with malformed text and fail to encode.
        # For example of bad input: "Hi \ud83d\ude1c"
        # https://en.wikipedia.org/wiki/UTF-16#U+010000_to_U+10FFFF
        while helpers.within_surrogate(text, relative_offset, length=_length):
            relative_offset += 1
        while helpers.within_surrogate(text, relative_offset + length, length=_length):
            length += 1
        # Recurse so that entities nested inside this one end up inside
        # this entity's HTML wrapper.
        entity_text = unparse(text=text[relative_offset:relative_offset + length],
                              entities=entities[i + 1:],
                              _offset=entity.offset, _length=length)
        entity_type = type(entity)
        if entity_type == MessageEntityBold:
            html.append('<strong>{}</strong>'.format(entity_text))
        elif entity_type == MessageEntityItalic:
            html.append('<em>{}</em>'.format(entity_text))
        elif entity_type == MessageEntityCode:
            html.append('<code>{}</code>'.format(entity_text))
        elif entity_type == MessageEntityUnderline:
            html.append('<u>{}</u>'.format(entity_text))
        elif entity_type == MessageEntityStrike:
            html.append('<del>{}</del>'.format(entity_text))
        elif entity_type == MessageEntityBlockquote:
            html.append('<blockquote>{}</blockquote>'.format(entity_text))
        elif entity_type == MessageEntityPre:
            if entity.language:
                # Syntax-highlighted block: mirror the form recognized
                # by the parser's <pre><code class='language-...'> path.
                html.append(
                    "<pre>\n"
                    "    <code class='language-{}'>\n"
                    "        {}\n"
                    "    </code>\n"
                    "</pre>".format(entity.language, entity_text))
            else:
                html.append('<pre><code>{}</code></pre>'
                            .format(entity_text))
        elif entity_type == MessageEntityEmail:
            html.append('<a href="mailto:{0}">{0}</a>'.format(entity_text))
        elif entity_type == MessageEntityUrl:
            html.append('<a href="{0}">{0}</a>'.format(entity_text))
        elif entity_type == MessageEntityTextUrl:
            html.append('<a href="{}">{}</a>'
                        .format(escape(entity.url), entity_text))
        elif entity_type == MessageEntityMentionName:
            html.append('<a href="tg://user?id={}">{}</a>'
                        .format(entity.user_id, entity_text))
        else:
            # Unknown entity type: last_offset is not advanced, so this
            # entity's text will be emitted as plain escaped text later.
            skip_entity = True
        last_offset = relative_offset + (0 if skip_entity else length)
    # Don't end the final slice in the middle of a surrogate pair.
    while helpers.within_surrogate(text, last_offset, length=_length):
        last_offset += 1
    html.append(escape(text[last_offset:]))
    return _del_surrogate(''.join(html))
|
PypiClean
|
/comt-2.6.4.tar.gz/comt-2.6.4/src/cm/media/js/lib/yui/yui3-3.15.0/build/tree/tree-coverage.js
|
if (typeof __coverage__ === 'undefined') { __coverage__ = {}; }
if (!__coverage__['build/tree/tree.js']) {
__coverage__['build/tree/tree.js'] = {"path":"build/tree/tree.js","s":{"1":0,"2":0,"3":0,"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0,"38":0,"39":0,"40":0,"41":0,"42":0,"43":0,"44":0,"45":0,"46":0,"47":0,"48":0,"49":0,"50":0,"51":0,"52":0,"53":0,"54":0,"55":0,"56":0,"57":0,"58":0,"59":0,"60":0,"61":0,"62":0,"63":0,"64":0,"65":0,"66":0,"67":0,"68":0,"69":0,"70":0,"71":0,"72":0,"73":0,"74":0,"75":0,"76":0,"77":0,"78":0,"79":0,"80":0,"81":0,"82":0,"83":0,"84":0,"85":0,"86":0,"87":0,"88":0,"89":0,"90":0,"91":0,"92":0,"93":0,"94":0,"95":0,"96":0,"97":0,"98":0,"99":0,"100":0,"101":0,"102":0,"103":0,"104":0,"105":0,"106":0,"107":0,"108":0,"109":0,"110":0,"111":0,"112":0,"113":0,"114":0,"115":0,"116":0,"117":0,"118":0,"119":0,"120":0,"121":0,"122":0,"123":0,"124":0,"125":0,"126":0,"127":0,"128":0,"129":0,"130":0,"131":0,"132":0,"133":0,"134":0,"135":0,"136":0,"137":0,"138":0,"139":0,"140":0,"141":0,"142":0,"143":0,"144":0,"145":0,"146":0,"147":0,"148":0,"149":0,"150":0,"151":0,"152":0,"153":0,"154":0,"155":0,"156":0,"157":0,"158":0,"159":0,"160":0,"161":0,"162":0,"163":0,"164":0,"165":0,"166":0,"167":0,"168":0,"169":0,"170":0,"171":0,"172":0,"173":0,"174":0,"175":0,"176":0,"177":0,"178":0,"179":0,"180":0,"181":0,"182":0,"183":0,"184":0,"185":0,"186":0,"187":0},"b":{"1":[0,0],"2":[0,0],"3":[0,0],"4":[0,0],"5":[0,0],"6":[0,0],"7":[0,0],"8":[0,0],"9":[0,0],"10":[0,0],"11":[0,0],"12":[0,0],"13":[0,0],"14":[0,0],"15":[0,0],"16":[0,0],"17":[0,0],"18":[0,0],"19":[0,0],"20":[0,0],"21":[0,0],"22":[0,0],"23":[0,0],"24":[0,0],"25":[0,0],"26":[0,0],"27":[0,0],"28":[0,0],"29":[0,0],"30":[0,0],"31":[0,0],"32":[0,0],"33":[0,0],"34":[0,0],"35":[0,0],"36":[0,0],"37":[0,0],"38":[0,0],"39":[0,0],"40":[0,0],"41":[0,0],"42":[0,0],"43":[0,0],"44":[0,0],"45":[0,0],"46":[0,0,0],"47":[0,0],"48":[0,0],"49":[0,0],"50
":[0,0],"51":[0,0],"52":[0,0],"53":[0,0],"54":[0,0],"55":[0,0],"56":[0,0],"57":[0,0]},"f":{"1":0,"2":0,"3":0,"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0},"fnMap":{"1":{"name":"(anonymous_1)","line":1,"loc":{"start":{"line":1,"column":16},"end":{"line":1,"column":35}}},"2":{"name":"(anonymous_2)","line":162,"loc":{"start":{"line":162,"column":17},"end":{"line":162,"column":35}}},"3":{"name":"(anonymous_3)","line":184,"loc":{"start":{"line":184,"column":44},"end":{"line":184,"column":56}}},"4":{"name":"(anonymous_4)","line":195,"loc":{"start":{"line":195,"column":16},"end":{"line":195,"column":28}}},"5":{"name":"(anonymous_5)","line":226,"loc":{"start":{"line":226,"column":16},"end":{"line":226,"column":49}}},"6":{"name":"(anonymous_6)","line":250,"loc":{"start":{"line":250,"column":11},"end":{"line":250,"column":40}}},"7":{"name":"(anonymous_7)","line":272,"loc":{"start":{"line":272,"column":16},"end":{"line":272,"column":34}}},"8":{"name":"(anonymous_8)","line":319,"loc":{"start":{"line":319,"column":17},"end":{"line":319,"column":42}}},"9":{"name":"(anonymous_9)","line":369,"loc":{"start":{"line":369,"column":15},"end":{"line":369,"column":40}}},"10":{"name":"(anonymous_10)","line":406,"loc":{"start":{"line":406,"column":14},"end":{"line":406,"column":58}}},"11":{"name":"(anonymous_11)","line":416,"loc":{"start":{"line":416,"column":41},"end":{"line":416,"column":63}}},"12":{"name":"(anonymous_12)","line":434,"loc":{"start":{"line":434,"column":17},"end":{"line":434,"column":31}}},"13":{"name":"(anonymous_13)","line":465,"loc":{"start":{"line":465,"column":16},"end":{"line":465,"column":49}}},"14":{"name":"(anonymous_14)","line":543,"loc":{"start":{"line":543,"column":17},"end":{"line":543,"column":50}}},"15":{"name":"(anonymous_15)","line":568,"loc":{"start":{"line":568,"column":16},"end":{"line":568,"column":41}}},"16":{"name":"(anonymous_16)","li
ne":592,"loc":{"start":{"line":592,"column":10},"end":{"line":592,"column":22}}},"17":{"name":"(anonymous_17)","line":602,"loc":{"start":{"line":602,"column":12},"end":{"line":602,"column":24}}},"18":{"name":"(anonymous_18)","line":640,"loc":{"start":{"line":640,"column":18},"end":{"line":640,"column":62}}},"19":{"name":"(anonymous_19)","line":686,"loc":{"start":{"line":686,"column":16},"end":{"line":686,"column":41}}},"20":{"name":"(anonymous_20)","line":725,"loc":{"start":{"line":725,"column":23},"end":{"line":725,"column":35}}},"21":{"name":"(anonymous_21)","line":748,"loc":{"start":{"line":748,"column":24},"end":{"line":748,"column":36}}},"22":{"name":"(anonymous_22)","line":781,"loc":{"start":{"line":781,"column":20},"end":{"line":781,"column":53}}},"23":{"name":"(anonymous_23)","line":815,"loc":{"start":{"line":815,"column":26},"end":{"line":815,"column":63}}},"24":{"name":"(anonymous_24)","line":826,"loc":{"start":{"line":826,"column":27},"end":{"line":826,"column":43}}},"25":{"name":"(anonymous_25)","line":849,"loc":{"start":{"line":849,"column":15},"end":{"line":849,"column":28}}},"26":{"name":"(anonymous_26)","line":887,"loc":{"start":{"line":887,"column":17},"end":{"line":887,"column":30}}},"27":{"name":"(anonymous_27)","line":901,"loc":{"start":{"line":901,"column":18},"end":{"line":901,"column":31}}}},"statementMap":{"1":{"start":{"line":1,"column":0},"end":{"line":928,"column":59}},"2":{"start":{"line":39,"column":0},"end":{"line":77,"column":26}},"3":{"start":{"line":79,"column":0},"end":{"line":923,"column":3}},"4":{"start":{"line":163,"column":8},"end":{"line":163,"column":32}},"5":{"start":{"line":165,"column":8},"end":{"line":167,"column":9}},"6":{"start":{"line":166,"column":12},"end":{"line":166,"column":46}},"7":{"start":{"line":169,"column":8},"end":{"line":171,"column":9}},"8":{"start":{"line":170,"column":12},"end":{"line":170,"column":84}},"9":{"start":{"line":180,"column":8},"end":{"line":180,"column":50}},"10":{"start":{"line":181,"column
":8},"end":{"line":181,"column":27}},"11":{"start":{"line":184,"column":8},"end":{"line":192,"column":11}},"12":{"start":{"line":185,"column":12},"end":{"line":185,"column":37}},"13":{"start":{"line":187,"column":12},"end":{"line":187,"column":56}},"14":{"start":{"line":189,"column":12},"end":{"line":191,"column":13}},"15":{"start":{"line":190,"column":16},"end":{"line":190,"column":77}},"16":{"start":{"line":196,"column":8},"end":{"line":196,"column":56}},"17":{"start":{"line":198,"column":8},"end":{"line":198,"column":31}},"18":{"start":{"line":199,"column":8},"end":{"line":199,"column":31}},"19":{"start":{"line":200,"column":8},"end":{"line":200,"column":31}},"20":{"start":{"line":201,"column":8},"end":{"line":201,"column":31}},"21":{"start":{"line":202,"column":8},"end":{"line":202,"column":31}},"22":{"start":{"line":227,"column":8},"end":{"line":230,"column":12}},"23":{"start":{"line":251,"column":8},"end":{"line":257,"column":11}},"24":{"start":{"line":273,"column":8},"end":{"line":273,"column":32}},"25":{"start":{"line":277,"column":8},"end":{"line":285,"column":9}},"26":{"start":{"line":278,"column":12},"end":{"line":281,"column":13}},"27":{"start":{"line":279,"column":16},"end":{"line":279,"column":95}},"28":{"start":{"line":280,"column":16},"end":{"line":280,"column":28}},"29":{"start":{"line":283,"column":12},"end":{"line":283,"column":36}},"30":{"start":{"line":284,"column":12},"end":{"line":284,"column":26}},"31":{"start":{"line":288,"column":8},"end":{"line":296,"column":9}},"32":{"start":{"line":289,"column":12},"end":{"line":289,"column":30}},"33":{"start":{"line":291,"column":12},"end":{"line":293,"column":13}},"34":{"start":{"line":292,"column":16},"end":{"line":292,"column":67}},"35":{"start":{"line":295,"column":12},"end":{"line":295,"column":59}},"36":{"start":{"line":298,"column":8},"end":{"line":298,"column":53}},"37":{"start":{"line":300,"column":8},"end":{"line":300,"column":45}},"38":{"start":{"line":320,"column":8},"end":{"line":320,"colum
n":26}},"39":{"start":{"line":322,"column":8},"end":{"line":322,"column":34}},"40":{"start":{"line":324,"column":8},"end":{"line":334,"column":9}},"41":{"start":{"line":325,"column":12},"end":{"line":325,"column":37}},"42":{"start":{"line":330,"column":12},"end":{"line":330,"column":32}},"43":{"start":{"line":333,"column":12},"end":{"line":333,"column":45}},"44":{"start":{"line":336,"column":8},"end":{"line":338,"column":9}},"45":{"start":{"line":337,"column":12},"end":{"line":337,"column":43}},"46":{"start":{"line":340,"column":8},"end":{"line":340,"column":28}},"47":{"start":{"line":341,"column":8},"end":{"line":341,"column":28}},"48":{"start":{"line":342,"column":8},"end":{"line":342,"column":43}},"49":{"start":{"line":343,"column":8},"end":{"line":343,"column":30}},"50":{"start":{"line":344,"column":8},"end":{"line":344,"column":28}},"51":{"start":{"line":346,"column":8},"end":{"line":346,"column":38}},"52":{"start":{"line":348,"column":8},"end":{"line":348,"column":20}},"53":{"start":{"line":370,"column":8},"end":{"line":371,"column":26}},"54":{"start":{"line":373,"column":8},"end":{"line":375,"column":9}},"55":{"start":{"line":374,"column":12},"end":{"line":374,"column":63}},"56":{"start":{"line":377,"column":8},"end":{"line":377,"column":23}},"57":{"start":{"line":407,"column":8},"end":{"line":407,"column":25}},"58":{"start":{"line":410,"column":8},"end":{"line":414,"column":9}},"59":{"start":{"line":411,"column":12},"end":{"line":411,"column":32}},"60":{"start":{"line":412,"column":12},"end":{"line":412,"column":31}},"61":{"start":{"line":413,"column":12},"end":{"line":413,"column":26}},"62":{"start":{"line":416,"column":8},"end":{"line":421,"column":11}},"63":{"start":{"line":417,"column":12},"end":{"line":420,"column":13}},"64":{"start":{"line":418,"column":16},"end":{"line":418,"column":35}},"65":{"start":{"line":419,"column":16},"end":{"line":419,"column":43}},"66":{"start":{"line":423,"column":8},"end":{"line":423,"column":21}},"67":{"start":{"line":435
,"column":8},"end":{"line":435,"column":33}},"68":{"start":{"line":466,"column":8},"end":{"line":466,"column":34}},"69":{"start":{"line":467,"column":8},"end":{"line":467,"column":44}},"70":{"start":{"line":481,"column":8},"end":{"line":499,"column":9}},"71":{"start":{"line":482,"column":12},"end":{"line":484,"column":29}},"72":{"start":{"line":486,"column":12},"end":{"line":496,"column":13}},"73":{"start":{"line":487,"column":16},"end":{"line":487,"column":73}},"74":{"start":{"line":489,"column":16},"end":{"line":495,"column":17}},"75":{"start":{"line":490,"column":20},"end":{"line":490,"column":53}},"76":{"start":{"line":492,"column":20},"end":{"line":494,"column":21}},"77":{"start":{"line":493,"column":24},"end":{"line":493,"column":43}},"78":{"start":{"line":498,"column":12},"end":{"line":498,"column":33}},"79":{"start":{"line":501,"column":8},"end":{"line":501,"column":37}},"80":{"start":{"line":503,"column":8},"end":{"line":519,"column":9}},"81":{"start":{"line":504,"column":12},"end":{"line":504,"column":38}},"82":{"start":{"line":506,"column":12},"end":{"line":508,"column":13}},"83":{"start":{"line":507,"column":16},"end":{"line":507,"column":73}},"84":{"start":{"line":510,"column":12},"end":{"line":518,"column":15}},"85":{"start":{"line":521,"column":8},"end":{"line":521,"column":20}},"86":{"start":{"line":544,"column":8},"end":{"line":547,"column":12}},"87":{"start":{"line":569,"column":8},"end":{"line":569,"column":34}},"88":{"start":{"line":571,"column":8},"end":{"line":579,"column":11}},"89":{"start":{"line":581,"column":8},"end":{"line":581,"column":20}},"90":{"start":{"line":593,"column":8},"end":{"line":593,"column":40}},"91":{"start":{"line":603,"column":8},"end":{"line":603,"column":38}},"92":{"start":{"line":641,"column":8},"end":{"line":644,"column":9}},"93":{"start":{"line":642,"column":12},"end":{"line":642,"column":85}},"94":{"start":{"line":643,"column":12},"end":{"line":643,"column":19}},"95":{"start":{"line":647,"column":8},"end":{"line":65
1,"column":9}},"96":{"start":{"line":648,"column":12},"end":{"line":648,"column":32}},"97":{"start":{"line":649,"column":12},"end":{"line":649,"column":31}},"98":{"start":{"line":650,"column":12},"end":{"line":650,"column":26}},"99":{"start":{"line":653,"column":8},"end":{"line":653,"column":34}},"100":{"start":{"line":655,"column":8},"end":{"line":656,"column":61}},"101":{"start":{"line":658,"column":8},"end":{"line":660,"column":9}},"102":{"start":{"line":659,"column":12},"end":{"line":659,"column":24}},"103":{"start":{"line":662,"column":8},"end":{"line":662,"column":37}},"104":{"start":{"line":664,"column":8},"end":{"line":672,"column":9}},"105":{"start":{"line":665,"column":12},"end":{"line":665,"column":80}},"106":{"start":{"line":667,"column":12},"end":{"line":671,"column":13}},"107":{"start":{"line":668,"column":16},"end":{"line":670,"column":17}},"108":{"start":{"line":669,"column":20},"end":{"line":669,"column":32}},"109":{"start":{"line":687,"column":8},"end":{"line":687,"column":32}},"110":{"start":{"line":689,"column":8},"end":{"line":691,"column":9}},"111":{"start":{"line":690,"column":12},"end":{"line":690,"column":19}},"112":{"start":{"line":693,"column":8},"end":{"line":695,"column":9}},"113":{"start":{"line":694,"column":12},"end":{"line":694,"column":62}},"114":{"start":{"line":697,"column":8},"end":{"line":697,"column":42}},"115":{"start":{"line":698,"column":8},"end":{"line":698,"column":41}},"116":{"start":{"line":703,"column":8},"end":{"line":707,"column":9}},"117":{"start":{"line":706,"column":12},"end":{"line":706,"column":50}},"118":{"start":{"line":709,"column":8},"end":{"line":709,"column":25}},"119":{"start":{"line":710,"column":8},"end":{"line":710,"column":38}},"120":{"start":{"line":726,"column":8},"end":{"line":728,"column":26}},"121":{"start":{"line":730,"column":8},"end":{"line":740,"column":9}},"122":{"start":{"line":732,"column":12},"end":{"line":732,"column":67}},"123":{"start":{"line":734,"column":12},"end":{"line":739,"column"
:13}},"124":{"start":{"line":735,"column":16},"end":{"line":735,"column":43}},"125":{"start":{"line":737,"column":16},"end":{"line":737,"column":76}},"126":{"start":{"line":738,"column":16},"end":{"line":738,"column":23}},"127":{"start":{"line":742,"column":8},"end":{"line":745,"column":9}},"128":{"start":{"line":743,"column":12},"end":{"line":743,"column":40}},"129":{"start":{"line":744,"column":12},"end":{"line":744,"column":19}},"130":{"start":{"line":748,"column":8},"end":{"line":756,"column":10}},"131":{"start":{"line":749,"column":12},"end":{"line":749,"column":59}},"132":{"start":{"line":751,"column":12},"end":{"line":751,"column":45}},"133":{"start":{"line":753,"column":12},"end":{"line":755,"column":13}},"134":{"start":{"line":754,"column":16},"end":{"line":754,"column":53}},"135":{"start":{"line":758,"column":8},"end":{"line":758,"column":43}},"136":{"start":{"line":760,"column":8},"end":{"line":762,"column":9}},"137":{"start":{"line":761,"column":12},"end":{"line":761,"column":78}},"138":{"start":{"line":764,"column":8},"end":{"line":764,"column":55}},"139":{"start":{"line":765,"column":8},"end":{"line":765,"column":40}},"140":{"start":{"line":782,"column":8},"end":{"line":795,"column":9}},"141":{"start":{"line":783,"column":12},"end":{"line":786,"column":13}},"142":{"start":{"line":784,"column":16},"end":{"line":784,"column":37}},"143":{"start":{"line":785,"column":16},"end":{"line":785,"column":53}},"144":{"start":{"line":788,"column":12},"end":{"line":792,"column":13}},"145":{"start":{"line":789,"column":16},"end":{"line":791,"column":19}},"146":{"start":{"line":794,"column":12},"end":{"line":794,"column":36}},"147":{"start":{"line":797,"column":8},"end":{"line":797,"column":20}},"148":{"start":{"line":816,"column":8},"end":{"line":816,"column":38}},"149":{"start":{"line":827,"column":8},"end":{"line":828,"column":18}},"150":{"start":{"line":830,"column":8},"end":{"line":845,"column":9}},"151":{"start":{"line":831,"column":12},"end":{"line":831,"column
":41}},"152":{"start":{"line":833,"column":12},"end":{"line":844,"column":13}},"153":{"start":{"line":834,"column":16},"end":{"line":834,"column":47}},"154":{"start":{"line":836,"column":16},"end":{"line":841,"column":17}},"155":{"start":{"line":837,"column":20},"end":{"line":837,"column":35}},"156":{"start":{"line":839,"column":20},"end":{"line":839,"column":46}},"157":{"start":{"line":840,"column":20},"end":{"line":840,"column":48}},"158":{"start":{"line":843,"column":16},"end":{"line":843,"column":35}},"159":{"start":{"line":850,"column":8},"end":{"line":853,"column":21}},"160":{"start":{"line":856,"column":8},"end":{"line":877,"column":9}},"161":{"start":{"line":859,"column":12},"end":{"line":871,"column":13}},"162":{"start":{"line":860,"column":16},"end":{"line":860,"column":48}},"163":{"start":{"line":862,"column":16},"end":{"line":870,"column":17}},"164":{"start":{"line":865,"column":20},"end":{"line":865,"column":27}},"165":{"start":{"line":866,"column":23},"end":{"line":870,"column":17}},"166":{"start":{"line":869,"column":20},"end":{"line":869,"column":31}},"167":{"start":{"line":873,"column":12},"end":{"line":876,"column":15}},"168":{"start":{"line":880,"column":8},"end":{"line":880,"column":29}},"169":{"start":{"line":881,"column":8},"end":{"line":881,"column":47}},"170":{"start":{"line":883,"column":8},"end":{"line":883,"column":38}},"171":{"start":{"line":884,"column":8},"end":{"line":884,"column":38}},"172":{"start":{"line":888,"column":8},"end":{"line":888,"column":37}},"173":{"start":{"line":890,"column":8},"end":{"line":892,"column":9}},"174":{"start":{"line":891,"column":12},"end":{"line":891,"column":60}},"175":{"start":{"line":894,"column":8},"end":{"line":894,"column":27}},"176":{"start":{"line":895,"column":8},"end":{"line":895,"column":52}},"177":{"start":{"line":897,"column":8},"end":{"line":897,"column":36}},"178":{"start":{"line":898,"column":8},"end":{"line":898,"column":45}},"179":{"start":{"line":902,"column":8},"end":{"line":902,"colum
n":26}},"180":{"start":{"line":904,"column":8},"end":{"line":912,"column":9}},"181":{"start":{"line":905,"column":12},"end":{"line":905,"column":51}},"182":{"start":{"line":906,"column":15},"end":{"line":912,"column":9}},"183":{"start":{"line":907,"column":12},"end":{"line":907,"column":45}},"184":{"start":{"line":908,"column":15},"end":{"line":912,"column":9}},"185":{"start":{"line":910,"column":12},"end":{"line":910,"column":66}},"186":{"start":{"line":911,"column":12},"end":{"line":911,"column":51}},"187":{"start":{"line":925,"column":0},"end":{"line":925,"column":29}}},"branchMap":{"1":{"line":163,"type":"binary-expr","locations":[{"start":{"line":163,"column":8},"end":{"line":163,"column":14}},{"start":{"line":163,"column":19},"end":{"line":163,"column":30}}]},"2":{"line":165,"type":"if","locations":[{"start":{"line":165,"column":8},"end":{"line":165,"column":8}},{"start":{"line":165,"column":8},"end":{"line":165,"column":8}}]},"3":{"line":169,"type":"if","locations":[{"start":{"line":169,"column":8},"end":{"line":169,"column":8}},{"start":{"line":169,"column":8},"end":{"line":169,"column":8}}]},"4":{"line":180,"type":"binary-expr","locations":[{"start":{"line":180,"column":8},"end":{"line":180,"column":23}},{"start":{"line":180,"column":28},"end":{"line":180,"column":48}}]},"5":{"line":189,"type":"if","locations":[{"start":{"line":189,"column":12},"end":{"line":189,"column":12}},{"start":{"line":189,"column":12},"end":{"line":189,"column":12}}]},"6":{"line":252,"type":"binary-expr","locations":[{"start":{"line":252,"column":38},"end":{"line":252,"column":46}},{"start":{"line":252,"column":50},"end":{"line":252,"column":70}}]},"7":{"line":253,"type":"binary-expr","locations":[{"start":{"line":253,"column":22},"end":{"line":253,"column":29}},{"start":{"line":253,"column":33},"end":{"line":253,"column":44}}]},"8":{"line":256,"type":"binary-expr","locations":[{"start":{"line":256,"column":23},"end":{"line":256,"column":30}},{"start":{"line":256,"column":34},"end":
{"line":256,"column":48}}]},"9":{"line":273,"type":"binary-expr","locations":[{"start":{"line":273,"column":8},"end":{"line":273,"column":14}},{"start":{"line":273,"column":19},"end":{"line":273,"column":30}}]},"10":{"line":277,"type":"if","locations":[{"start":{"line":277,"column":8},"end":{"line":277,"column":8}},{"start":{"line":277,"column":8},"end":{"line":277,"column":8}}]},"11":{"line":278,"type":"if","locations":[{"start":{"line":278,"column":12},"end":{"line":278,"column":12}},{"start":{"line":278,"column":12},"end":{"line":278,"column":12}}]},"12":{"line":288,"type":"if","locations":[{"start":{"line":288,"column":8},"end":{"line":288,"column":8}},{"start":{"line":288,"column":8},"end":{"line":288,"column":8}}]},"13":{"line":322,"type":"binary-expr","locations":[{"start":{"line":322,"column":8},"end":{"line":322,"column":15}},{"start":{"line":322,"column":20},"end":{"line":322,"column":32}}]},"14":{"line":336,"type":"if","locations":[{"start":{"line":336,"column":8},"end":{"line":336,"column":8}},{"start":{"line":336,"column":8},"end":{"line":336,"column":8}}]},"15":{"line":410,"type":"if","locations":[{"start":{"line":410,"column":8},"end":{"line":410,"column":8}},{"start":{"line":410,"column":8},"end":{"line":410,"column":8}}]},"16":{"line":417,"type":"if","locations":[{"start":{"line":417,"column":12},"end":{"line":417,"column":12}},{"start":{"line":417,"column":12},"end":{"line":417,"column":12}}]},"17":{"line":466,"type":"binary-expr","locations":[{"start":{"line":466,"column":8},"end":{"line":466,"column":15}},{"start":{"line":466,"column":20},"end":{"line":466,"column":32}}]},"18":{"line":467,"type":"binary-expr","locations":[{"start":{"line":467,"column":8},"end":{"line":467,"column":14}},{"start":{"line":467,"column":20},"end":{"line":467,"column":42}}]},"19":{"line":481,"type":"if","locations":[{"start":{"line":481,"column":8},"end":{"line":481,"column":8}},{"start":{"line":481,"column":8},"end":{"line":481,"column":8}}]},"20":{"line":481,"type":"
binary-expr","locations":[{"start":{"line":481,"column":12},"end":{"line":481,"column":28}},{"start":{"line":481,"column":32},"end":{"line":481,"column":50}}]},"21":{"line":489,"type":"if","locations":[{"start":{"line":489,"column":16},"end":{"line":489,"column":16}},{"start":{"line":489,"column":16},"end":{"line":489,"column":16}}]},"22":{"line":492,"type":"if","locations":[{"start":{"line":492,"column":20},"end":{"line":492,"column":20}},{"start":{"line":492,"column":20},"end":{"line":492,"column":20}}]},"23":{"line":503,"type":"if","locations":[{"start":{"line":503,"column":8},"end":{"line":503,"column":8}},{"start":{"line":503,"column":8},"end":{"line":503,"column":8}}]},"24":{"line":506,"type":"if","locations":[{"start":{"line":506,"column":12},"end":{"line":506,"column":12}},{"start":{"line":506,"column":12},"end":{"line":506,"column":12}}]},"25":{"line":514,"type":"binary-expr","locations":[{"start":{"line":514,"column":24},"end":{"line":514,"column":35}},{"start":{"line":514,"column":39},"end":{"line":514,"column":47}}]},"26":{"line":569,"type":"binary-expr","locations":[{"start":{"line":569,"column":8},"end":{"line":569,"column":15}},{"start":{"line":569,"column":20},"end":{"line":569,"column":32}}]},"27":{"line":575,"type":"binary-expr","locations":[{"start":{"line":575,"column":21},"end":{"line":575,"column":32}},{"start":{"line":575,"column":36},"end":{"line":575,"column":44}}]},"28":{"line":641,"type":"if","locations":[{"start":{"line":641,"column":8},"end":{"line":641,"column":8}},{"start":{"line":641,"column":8},"end":{"line":641,"column":8}}]},"29":{"line":647,"type":"if","locations":[{"start":{"line":647,"column":8},"end":{"line":647,"column":8}},{"start":{"line":647,"column":8},"end":{"line":647,"column":8}}]},"30":{"line":653,"type":"binary-expr","locations":[{"start":{"line":653,"column":8},"end":{"line":653,"column":15}},{"start":{"line":653,"column":20},"end":{"line":653,"column":32}}]},"31":{"line":658,"type":"if","locations":[{"start":{"line"
:658,"column":8},"end":{"line":658,"column":8}},{"start":{"line":658,"column":8},"end":{"line":658,"column":8}}]},"32":{"line":664,"type":"if","locations":[{"start":{"line":664,"column":8},"end":{"line":664,"column":8}},{"start":{"line":664,"column":8},"end":{"line":664,"column":8}}]},"33":{"line":664,"type":"binary-expr","locations":[{"start":{"line":664,"column":12},"end":{"line":664,"column":21}},{"start":{"line":664,"column":25},"end":{"line":664,"column":42}}]},"34":{"line":665,"type":"cond-expr","locations":[{"start":{"line":665,"column":43},"end":{"line":665,"column":50}},{"start":{"line":665,"column":53},"end":{"line":665,"column":79}}]},"35":{"line":668,"type":"if","locations":[{"start":{"line":668,"column":16},"end":{"line":668,"column":16}},{"start":{"line":668,"column":16},"end":{"line":668,"column":16}}]},"36":{"line":689,"type":"if","locations":[{"start":{"line":689,"column":8},"end":{"line":689,"column":8}},{"start":{"line":689,"column":8},"end":{"line":689,"column":8}}]},"37":{"line":703,"type":"if","locations":[{"start":{"line":703,"column":8},"end":{"line":703,"column":8}},{"start":{"line":703,"column":8},"end":{"line":703,"column":8}}]},"38":{"line":703,"type":"binary-expr","locations":[{"start":{"line":703,"column":12},"end":{"line":703,"column":46}},{"start":{"line":704,"column":19},"end":{"line":704,"column":57}}]},"39":{"line":730,"type":"if","locations":[{"start":{"line":730,"column":8},"end":{"line":730,"column":8}},{"start":{"line":730,"column":8},"end":{"line":730,"column":8}}]},"40":{"line":734,"type":"if","locations":[{"start":{"line":734,"column":12},"end":{"line":734,"column":12}},{"start":{"line":734,"column":12},"end":{"line":734,"column":12}}]},"41":{"line":742,"type":"if","locations":[{"start":{"line":742,"column":8},"end":{"line":742,"column":8}},{"start":{"line":742,"column":8},"end":{"line":742,"column":8}}]},"42":{"line":782,"type":"if","locations":[{"start":{"line":782,"column":8},"end":{"line":782,"column":8}},{"start":{"line
":782,"column":8},"end":{"line":782,"column":8}}]},"43":{"line":782,"type":"binary-expr","locations":[{"start":{"line":782,"column":12},"end":{"line":782,"column":19}},{"start":{"line":782,"column":23},"end":{"line":782,"column":37}}]},"44":{"line":783,"type":"if","locations":[{"start":{"line":783,"column":12},"end":{"line":783,"column":12}},{"start":{"line":783,"column":12},"end":{"line":783,"column":12}}]},"45":{"line":788,"type":"if","locations":[{"start":{"line":788,"column":12},"end":{"line":788,"column":12}},{"start":{"line":788,"column":12},"end":{"line":788,"column":12}}]},"46":{"line":788,"type":"binary-expr","locations":[{"start":{"line":788,"column":16},"end":{"line":788,"column":23}},{"start":{"line":788,"column":27},"end":{"line":788,"column":44}},{"start":{"line":788,"column":48},"end":{"line":788,"column":70}}]},"47":{"line":830,"type":"if","locations":[{"start":{"line":830,"column":8},"end":{"line":830,"column":8}},{"start":{"line":830,"column":8},"end":{"line":830,"column":8}}]},"48":{"line":833,"type":"if","locations":[{"start":{"line":833,"column":12},"end":{"line":833,"column":12}},{"start":{"line":833,"column":12},"end":{"line":833,"column":12}}]},"49":{"line":836,"type":"if","locations":[{"start":{"line":836,"column":16},"end":{"line":836,"column":16}},{"start":{"line":836,"column":16},"end":{"line":836,"column":16}}]},"50":{"line":856,"type":"if","locations":[{"start":{"line":856,"column":8},"end":{"line":856,"column":8}},{"start":{"line":856,"column":8},"end":{"line":856,"column":8}}]},"51":{"line":859,"type":"if","locations":[{"start":{"line":859,"column":12},"end":{"line":859,"column":12}},{"start":{"line":859,"column":12},"end":{"line":859,"column":12}}]},"52":{"line":862,"type":"if","locations":[{"start":{"line":862,"column":16},"end":{"line":862,"column":16}},{"start":{"line":862,"column":16},"end":{"line":862,"column":16}}]},"53":{"line":866,"type":"if","locations":[{"start":{"line":866,"column":23},"end":{"line":866,"column":23}},{"sta
rt":{"line":866,"column":23},"end":{"line":866,"column":23}}]},"54":{"line":890,"type":"if","locations":[{"start":{"line":890,"column":8},"end":{"line":890,"column":8}},{"start":{"line":890,"column":8},"end":{"line":890,"column":8}}]},"55":{"line":904,"type":"if","locations":[{"start":{"line":904,"column":8},"end":{"line":904,"column":8}},{"start":{"line":904,"column":8},"end":{"line":904,"column":8}}]},"56":{"line":906,"type":"if","locations":[{"start":{"line":906,"column":15},"end":{"line":906,"column":15}},{"start":{"line":906,"column":15},"end":{"line":906,"column":15}}]},"57":{"line":908,"type":"if","locations":[{"start":{"line":908,"column":15},"end":{"line":908,"column":15}},{"start":{"line":908,"column":15},"end":{"line":908,"column":15}}]}},"code":["(function () { YUI.add('tree', function (Y, NAME) {","","/*jshint boss:true, expr:true, onevar:false */","","/**","Provides a generic tree data structure and related functionality.","","A tree has a root node, which may contain any number of child nodes, which may","themselves contain child nodes, ad infinitum.","","Child nodes are lightweight function instances which delegate to the tree for","all significant functionality, so trees remain performant and memory-efficient","even with thousands and thousands of nodes.","","@module tree","@main tree","**/","","/**","The `Tree` class represents a generic tree data structure. A tree has a root","node, which may contain any number of child nodes, which may themselves contain","child nodes, ad infinitum.","","This class doesn't expose any UI, but is intended to be used as a data structure","or base class for other components. 
For example, the SmugMug TreeView gallery","module extends Tree and provides a TreeView UI.","","@class Tree","@param {Object} [config] Config options."," @param {Object[]|Tree.Node[]} [config.nodes] Array of tree node config"," objects or `Tree.Node` instances to add to this tree at initialization"," time."," @param {Object|Tree.Node} [config.rootNode] Node to use as the root node of"," this tree.","@constructor","@extends Base","**/","","var Lang = Y.Lang,",""," /**"," Fired when a node is added to this Tree. The `src` property will indicate"," how the node was added (\"append\", \"insert\", \"prepend\", etc.).",""," @event add"," @param {Number} index Index at which the node will be added."," @param {Tree.Node} node Node being added."," @param {Tree.Node} parent Parent node to which the node will be added."," @param {String} src Source of the event (\"append\", \"insert\", \"prepend\","," etc.)."," @preventable _defAddFn"," **/"," EVT_ADD = 'add',",""," /**"," Fired when this Tree is cleared.",""," @event clear"," @param {Tree.Node} rootNode New root node of this tree (the old root node is"," always destroyed when a tree is cleared)."," @param {String} src Source of the event."," @preventable _defClearFn"," **/"," EVT_CLEAR = 'clear',",""," /**"," Fired when a node is removed from this Tree.",""," @event remove"," @param {Boolean} destroy Whether or not the node will be destroyed after"," being removed from this tree."," @param {Tree.Node} node Node being removed."," @param {Tree.Node} parent Parent node from which the node will be removed."," @param {String} src Source of the event."," @preventable _defRemoveFn"," **/"," EVT_REMOVE = 'remove';","","var Tree = Y.Base.create('tree', Y.Base, [], {"," // -- Public Properties ----------------------------------------------------",""," /**"," Reference to the `children` array of this Tree's `rootNode`.",""," This is a convenience property to allow you to type `tree.children` instead"," of 
`tree.rootNode.children`.",""," @property {Tree.Node[]} children"," @readOnly"," **/",""," /**"," The `Tree.Node` class or subclass that should be used for nodes created by"," this tree.",""," You may specify an actual class reference or a string that resolves to a"," class reference at runtime.",""," @property {String|Tree.Node} nodeClass"," @default Y.Tree.Node"," **/"," nodeClass: Y.Tree.Node,",""," /**"," Optional array containing one or more extension classes that should be mixed"," into the `nodeClass` when this Tree is instantiated. The resulting composed"," node class will be unique to this Tree instance and will not affect any"," other instances, nor will it modify the defined `nodeClass` itself.",""," This provides a late-binding extension mechanism for nodes that doesn't"," require them to extend `Y.Base`, which would incur a significant performance"," hit.",""," @property {Array} nodeExtensions"," @default []"," **/"," nodeExtensions: [],",""," /**"," Root node of this Tree.",""," @property {Tree.Node} rootNode"," @readOnly"," **/",""," // -- Protected Properties -------------------------------------------------",""," /**"," Simple way to type-check that this is a Tree instance.",""," @property {Boolean} _isYUITree"," @default true"," @protected"," **/"," _isYUITree: true,",""," /**"," Composed node class based on `nodeClass` that mixes in any extensions"," specified in `nodeExtensions`. 
If there are no extensions, this will just be"," a reference to `nodeClass`.",""," @property {Tree.Node} _nodeClass"," @protected"," **/",""," /**"," Mapping of node ids to node instances for nodes in this tree.",""," @property {Object} _nodeMap"," @protected"," **/",""," /**"," Default config object for the root node.",""," @property {Object} _rootNodeConfig"," @protected"," **/"," _rootNodeConfig: {canHaveChildren: true},",""," // -- Lifecycle ------------------------------------------------------------"," initializer: function (config) {"," config || (config = {});",""," if (config.nodeClass) {"," this.nodeClass = config.nodeClass;"," }",""," if (config.nodeExtensions) {"," this.nodeExtensions = this.nodeExtensions.concat(config.nodeExtensions);"," }",""," /**"," Hash of published custom events.",""," @property {Object} _published"," @default {}"," @protected"," **/"," this._published || (this._published = {});"," this._nodeMap = {};",""," // Allow all extensions to initialize, then finish up."," this.onceAfter('initializedChange', function () {"," this._composeNodeClass();",""," this.clear(config.rootNode, {silent: true});",""," if (config.nodes) {"," this.insertNode(this.rootNode, config.nodes, {silent: true});"," }"," });"," },",""," destructor: function () {"," this.destroyNode(this.rootNode, {silent: true});",""," this.children = null;"," this.rootNode = null;"," this._nodeClass = null;"," this._nodeMap = null;"," this._published = null;"," },",""," // -- Public Methods -------------------------------------------------------",""," /**"," Appends a node or array of nodes as the last child of the specified parent"," node.",""," If a node being appended is from another tree, it and all its children will"," be removed from that tree and moved to this one.",""," @method appendNode"," @param {Tree.Node} parent Parent node."," @param {Object|Object[]|Tree.Node|Tree.Node[]} node Child node, node config"," object, array of child nodes, or array of node config 
objects to append"," to the given parent. Node config objects will automatically be converted"," into node instances."," @param {Object} [options] Options."," @param {Boolean} [options.silent=false] If `true`, the `add` event will"," be suppressed."," @return {Tree.Node|Tree.Node[]} Node or array of nodes that were"," appended."," **/"," appendNode: function (parent, node, options) {"," return this.insertNode(parent, node, Y.merge(options, {"," index: parent.children.length,"," src : 'append'"," }));"," },",""," /**"," Clears this tree by destroying the root node and all its children. If a"," `rootNode` argument is provided, that node will become the root node of this"," tree; otherwise, a new root node will be created.",""," @method clear"," @param {Object|Tree.Node} [rootNode] If specified, this node will be used as"," the new root node."," @param {Object} [options] Options."," @param {Boolean} [options.silent=false] If `true`, the `clear` event"," will be suppressed."," @param {String} [options.src] Source of the change, to be passed along"," to the event facade of the resulting event. This can be used to"," distinguish between changes triggered by a user and changes"," triggered programmatically, for example."," @chainable"," **/"," clear: function (rootNode, options) {"," return this._fireTreeEvent(EVT_CLEAR, {"," rootNode: this.createNode(rootNode || this._rootNodeConfig),"," src : options && options.src"," }, {"," defaultFn: this._defClearFn,"," silent : options && options.silent"," });"," },",""," /**"," Creates and returns a new `Tree.Node` instance associated with (but not"," yet appended to) this tree.",""," @method createNode"," @param {Object|Tree.Node} [config] Node configuration. 
If a `Tree.Node`"," instance is specified instead of a config object, that node will be"," adopted into this tree (if it doesn't already belong to this tree) and"," removed from any other tree to which it belongs."," @return {Tree.Node|null} New node, or `null` if a node could not be created"," from the given _config_."," **/"," createNode: function (config) {"," config || (config = {});",""," // If `config` is already a node, just ensure it hasn't been destroyed"," // and is in the node map, then return it."," if (config._isYUITreeNode) {"," if (config.state.destroyed) {"," Y.error('Cannot insert a node that has already been destroyed.', null, 'tree');"," return null;"," }",""," this._adoptNode(config);"," return config;"," }",""," // First, create nodes for any children of this node."," if (config.children) {"," var children = [];",""," for (var i = 0, len = config.children.length; i < len; i++) {"," children.push(this.createNode(config.children[i]));"," }",""," config = Y.merge(config, {children: children});"," }",""," var node = new this._nodeClass(this, config);",""," return this._nodeMap[node.id] = node;"," },",""," /**"," Removes and destroys a node and all its child nodes. Once destroyed, a node"," is eligible for garbage collection and cannot be reused or re-added to the"," tree.",""," @method destroyNode"," @param {Tree.Node} node Node to destroy."," @param {Object} [options] Options."," @param {Boolean} [options.silent=false] If `true`, `remove` events will"," be suppressed."," @param {String} [options.src] Source of the change, to be passed along"," to the event facade of the resulting events. 
This can be used to"," distinguish between changes triggered by a user and changes"," triggered programmatically, for example."," @chainable"," **/"," destroyNode: function (node, options) {"," var child, i, len;",""," options || (options = {});",""," for (i = 0, len = node.children.length; i < len; i++) {"," child = node.children[i];",""," // Manually remove the child from its parent; this makes destroying"," // all children of the parent much faster since there's no splicing"," // involved."," child.parent = null;",""," // Destroy the child."," this.destroyNode(child, options);"," }",""," if (node.parent) {"," this.removeNode(node, options);"," }",""," node.children = [];"," node.data = {};"," node.state = {destroyed: true};"," node.tree = null;"," node._indexMap = {};",""," delete this._nodeMap[node.id];",""," return this;"," },",""," /**"," Removes all children from the specified node. The removed children will"," still be reusable unless the `destroy` option is truthy.",""," @method emptyNode"," @param {Tree.Node} node Node to empty."," @param {Object} [options] Options."," @param {Boolean} [options.destroy=false] If `true`, the children will"," also be destroyed, which makes them available for garbage collection"," and means they can't be reused."," @param {Boolean} [options.silent=false] If `true`, `remove` events will"," be suppressed."," @param {String} [options.src] Source of the change, to be passed along"," to the event facade of the resulting events. 
This can be used to"," distinguish between changes triggered by a user and changes"," triggered programmatically, for example."," @return {Tree.Node[]} Array of removed child nodes."," **/"," emptyNode: function (node, options) {"," var children = node.children,"," removed = [];",""," for (var i = children.length - 1; i > -1; --i) {"," removed[i] = this.removeNode(children[i], options);"," }",""," return removed;"," },",""," /**"," Performs a depth-first traversal of _node_, passing it and each of its"," descendants to the specified _callback_, and returning the first node for"," which the callback returns a truthy value.",""," Traversal will stop as soon as a truthy value is returned from the callback.",""," See `traverseNode()` for more details on how depth-first traversal works.",""," @method findNode"," @param {Tree.Node} node Node to traverse."," @param {Object} [options] Options."," @param {Number} [options.depth] Depth limit. If specified, descendants"," will only be traversed to this depth before backtracking and moving"," on."," @param {Function} callback Callback function to call with the traversed"," node and each of its descendants. 
If this function returns a truthy"," value, traversal will be stopped and the current node will be returned.",""," @param {Tree.Node} callback.node Node being traversed.",""," @param {Object} [thisObj] `this` object to use when executing _callback_."," @return {Tree.Node|null} Returns the first node for which the _callback_"," returns a truthy value, or `null` if the callback never returns a truthy"," value."," **/"," findNode: function (node, options, callback, thisObj) {"," var match = null;",""," // Allow callback as second argument."," if (typeof options === 'function') {"," thisObj = callback;"," callback = options;"," options = {};"," }",""," this.traverseNode(node, options, function (descendant) {"," if (callback.call(thisObj, descendant)) {"," match = descendant;"," return Tree.STOP_TRAVERSAL;"," }"," });",""," return match;"," },",""," /**"," Returns the tree node with the specified id, or `undefined` if the node"," doesn't exist in this tree.",""," @method getNodeById"," @param {String} id Node id."," @return {Tree.Node} Node, or `undefined` if not found."," **/"," getNodeById: function (id) {"," return this._nodeMap[id];"," },",""," /**"," Inserts a node or array of nodes at the specified index under the given"," parent node, or appends them to the parent if no index is specified.",""," If a node being inserted is from another tree, it and all its children will"," be removed from that tree and moved to this one.",""," @method insertNode"," @param {Tree.Node} parent Parent node."," @param {Object|Object[]|Tree.Node|Tree.Node[]} node Child node, node config"," object, array of child nodes, or array of node config objects to insert"," under the given parent. 
Node config objects will automatically be"," converted into node instances.",""," @param {Object} [options] Options."," @param {Number} [options.index] Index at which to insert the child node."," If not specified, the node will be appended as the last child of the"," parent."," @param {Boolean} [options.silent=false] If `true`, the `add` event will"," be suppressed."," @param {String} [options.src='insert'] Source of the change, to be"," passed along to the event facade of the resulting event. This can be"," used to distinguish between changes triggered by a user and changes"," triggered programmatically, for example.",""," @return {Tree.Node|Tree.Node[]} Node or array of nodes that were inserted."," **/"," insertNode: function (parent, node, options) {"," options || (options = {});"," parent || (parent = this.rootNode);",""," // If `node` is an array, recurse to insert each node it contains."," //"," // Note: If you're getting an exception here because `node` is null when"," // you've passed in a reference to some other node's `children` array,"," // that's happening because nodes must be removed from their current"," // parent before being added to the new one, and the `children` array is"," // being modified while the nodes are inserted."," //"," // Solution: pass a copy of the other node's `children` array instead of"," // the original. 
Doing the copy operation here would have a negative"," // impact on performance, so you're on your own since this is such a"," // rare edge case."," if ('length' in node && Lang.isArray(node)) {"," var hasIndex = 'index' in options,"," insertedNodes = [],"," insertedNode;",""," for (var i = 0, len = node.length; i < len; i++) {"," insertedNode = this.insertNode(parent, node[i], options);",""," if (insertedNode) {"," insertedNodes.push(insertedNode);",""," if (hasIndex) {"," options.index += 1;"," }"," }"," }",""," return insertedNodes;"," }",""," node = this.createNode(node);",""," if (node) {"," var index = options.index;",""," if (typeof index === 'undefined') {"," index = this._getDefaultNodeIndex(parent, node, options);"," }",""," this._fireTreeEvent(EVT_ADD, {"," index : index,"," node : node,"," parent: parent,"," src : options.src || 'insert'"," }, {"," defaultFn: this._defAddFn,"," silent : options.silent"," });"," }",""," return node;"," },",""," /**"," Prepends a node or array of nodes at the beginning of the specified parent"," node.",""," If a node being prepended is from another tree, it and all its children will"," be removed from that tree and moved to this one.",""," @method prependNode"," @param {Tree.Node} parent Parent node."," @param {Object|Object[]|Tree.Node|Tree.Node[]} node Child node,"," node config object, array of child nodes, or array of node config"," objects to prepend to the given parent. Node config objects will"," automatically be converted into node instances."," @param {Object} [options] Options."," @param {Boolean} [options.silent=false] If `true`, the `add` event will"," be suppressed."," @return {Tree.Node|Tree.Node[]} Node or array of nodes that were"," prepended."," **/"," prependNode: function (parent, node, options) {"," return this.insertNode(parent, node, Y.merge(options, {"," index: 0,"," src : 'prepend'"," }));"," },",""," /**"," Removes the specified node from its parent node. 
The removed node will still"," be reusable unless the `destroy` option is truthy.",""," @method removeNode"," @param {Tree.Node} node Node to remove."," @param {Object} [options] Options."," @param {Boolean} [options.destroy=false] If `true`, the node and all its"," children will also be destroyed, which makes them available for"," garbage collection and means they can't be reused."," @param {Boolean} [options.silent=false] If `true`, the `remove` event"," will be suppressed."," @param {String} [options.src] Source of the change, to be passed along"," to the event facade of the resulting event. This can be used to"," distinguish between changes triggered by a user and changes"," triggered programmatically, for example."," @return {Tree.Node} Node that was removed."," **/"," removeNode: function (node, options) {"," options || (options = {});",""," this._fireTreeEvent(EVT_REMOVE, {"," destroy: !!options.destroy,"," node : node,"," parent : node.parent,"," src : options.src || 'remove'"," }, {"," defaultFn: this._defRemoveFn,"," silent : options.silent"," });",""," return node;"," },",""," /**"," Returns the total number of nodes in this tree, at all levels.",""," Use `rootNode.children.length` to get only the number of top-level nodes.",""," @method size"," @return {Number} Total number of nodes in this tree."," **/"," size: function () {"," return this.rootNode.size() + 1;"," },",""," /**"," Serializes this tree to an object suitable for use in JSON.",""," @method toJSON"," @return {Object} Serialized tree object."," **/"," toJSON: function () {"," return this.rootNode.toJSON();"," },",""," /**"," Performs a depth-first traversal of _node_, passing it and each of its"," descendants to the specified _callback_.",""," If the callback function returns `Tree.STOP_TRAVERSAL`, traversal will be"," stopped immediately. 
Otherwise, it will continue until the deepest"," descendant of _node_ has been traversed, or until each branch has been"," traversed to the optional maximum depth limit.",""," Since traversal is depth-first, that means nodes are traversed like this:",""," 1"," / | \\"," 2 8 9"," / \\ \\"," 3 7 10"," / | \\ / \\"," 4 5 6 11 12",""," @method traverseNode"," @param {Tree.Node} node Node to traverse."," @param {Object} [options] Options."," @param {Number} [options.depth] Depth limit. If specified, descendants"," will only be traversed to this depth before backtracking and moving"," on."," @param {Function} callback Callback function to call with the traversed"," node and each of its descendants.",""," @param {Tree.Node} callback.node Node being traversed.",""," @param {Object} [thisObj] `this` object to use when executing _callback_."," @return {Mixed} Returns `Tree.STOP_TRAVERSAL` if traversal was stopped;"," otherwise returns `undefined`."," **/"," traverseNode: function (node, options, callback, thisObj) {"," if (node.state.destroyed) {"," Y.error('Cannot traverse a node that has been destroyed.', null, 'tree');"," return;"," }",""," // Allow callback as second argument."," if (typeof options === 'function') {"," thisObj = callback;"," callback = options;"," options = {};"," }",""," options || (options = {});",""," var stop = Tree.STOP_TRAVERSAL,"," unlimited = typeof options.depth === 'undefined';",""," if (callback.call(thisObj, node) === stop) {"," return stop;"," }",""," var children = node.children;",""," if (unlimited || options.depth > 0) {"," var childOptions = unlimited ? 
options : {depth: options.depth - 1};",""," for (var i = 0, len = children.length; i < len; i++) {"," if (this.traverseNode(children[i], childOptions, callback, thisObj) === stop) {"," return stop;"," }"," }"," }"," },",""," // -- Protected Methods ----------------------------------------------------",""," /**"," Moves the specified node and all its children from another tree to this"," tree.",""," @method _adoptNode"," @param {Tree.Node} node Node to adopt."," @param {Object} [options] Options to pass along to `removeNode()`."," @protected"," **/"," _adoptNode: function (node, options) {"," var oldTree = node.tree;",""," if (oldTree === this) {"," return;"," }",""," for (var i = 0, len = node.children.length; i < len; i++) {"," this._adoptNode(node.children[i], {silent: true});"," }",""," oldTree.removeNode(node, options);"," delete oldTree._nodeMap[node.id];",""," // If this node isn't an instance of this tree's composed _nodeClass,"," // then we need to recreate it to avoid potentially breaking things in"," // really weird ways."," if (!(node instanceof this._nodeClass)"," || oldTree._nodeClass !== this._nodeClass) {",""," node = this.createNode(node.toJSON());"," }",""," node.tree = this;"," this._nodeMap[node.id] = node;"," },",""," /**"," Composes a custom late-bound tree node class (if necessary) based on the"," classes specified in this Tree's `nodeClass` and `nodeExtensions`"," properties.",""," The composed class is stored in this Tree's `_nodeClass` property. 
If"," composition wasn't necessary, then `_nodeClass` will just be a reference to"," `nodeClass`.",""," @method _composeNodeClass"," @protected"," **/"," _composeNodeClass: function () {"," var nodeClass = this.nodeClass,"," nodeExtensions = this.nodeExtensions,"," composedClass;",""," if (typeof nodeClass === 'string') {"," // Look for a namespaced node class on `Y`."," nodeClass = Y.Object.getValue(Y, nodeClass.split('.'));",""," if (nodeClass) {"," this.nodeClass = nodeClass;"," } else {"," Y.error('Node class not found: ' + nodeClass, null, 'tree');"," return;"," }"," }",""," if (!nodeExtensions.length) {"," this._nodeClass = nodeClass;"," return;"," }",""," // Compose a new class by mixing extensions into nodeClass."," composedClass = function () {"," var extensions = composedClass._nodeExtensions;",""," nodeClass.apply(this, arguments);",""," for (var i = 0, len = extensions.length; i < len; i++) {"," extensions[i].apply(this, arguments);"," }"," };",""," Y.extend(composedClass, nodeClass);",""," for (var i = 0, len = nodeExtensions.length; i < len; i++) {"," Y.mix(composedClass.prototype, nodeExtensions[i].prototype, true);"," }",""," composedClass._nodeExtensions = nodeExtensions;"," this._nodeClass = composedClass;"," },",""," /**"," Utility method for lazily publishing and firing events.",""," @method _fireTreeEvent"," @param {String} name Event name to fire."," @param {Object} facade Event facade."," @param {Object} [options] Options."," @param {Function} [options.defaultFn] Default handler for this event."," @param {Boolean} [options.silent=false] Whether the default handler"," should be executed directly without actually firing the event."," @chainable"," @protected"," **/"," _fireTreeEvent: function (name, facade, options) {"," if (options && options.silent) {"," if (options.defaultFn) {"," facade.silent = true; // intentionally modifying the facade"," options.defaultFn.call(this, facade);"," }"," } else {"," if (options && options.defaultFn && 
!this._published[name]) {"," this._published[name] = this.publish(name, {"," defaultFn: options.defaultFn"," });"," }",""," this.fire(name, facade);"," }",""," return this;"," },",""," /**"," Returns the default insertion index that should be used when _node_ is"," inserted as a child of _parent_ without an explicit index.",""," The primary purpose of this method is to serve as a hook point for"," extensions and plugins that need to customize insertion order.",""," @method _getDefaultNodeIndex"," @param {Tree.Node} parent Parent node."," @param {Tree.Node} node Node being inserted."," @param {Object} [options] Options passed to `insertNode()`."," @return {Number} Index at which _node_ should be inserted into _parent_'s"," `children` array."," @protected"," **/"," _getDefaultNodeIndex: function (parent/*, node, options*/) {"," return parent.children.length;"," },",""," /**"," Removes the specified node from its parent node if it has one.",""," @method _removeNodeFromParent"," @param {Tree.Node} node Node to remove."," @protected"," **/"," _removeNodeFromParent: function (node) {"," var parent = node.parent,"," index;",""," if (parent) {"," index = parent.indexOf(node);",""," if (index > -1) {"," var children = parent.children;",""," if (index === children.length - 1) {"," children.pop();"," } else {"," children.splice(index, 1);"," parent._isIndexStale = true;"," }",""," node.parent = null;"," }"," }"," },",""," // -- Default Event Handlers -----------------------------------------------"," _defAddFn: function (e) {"," var index = e.index,"," node = e.node,"," parent = e.parent,"," oldIndex;",""," // Remove the node from its existing parent if it has one."," if (node.parent) {"," // If the node's existing parent is the same parent it's being"," // inserted into, adjust the index to avoid an off-by-one error."," if (node.parent === parent) {"," oldIndex = parent.indexOf(node);",""," if (oldIndex === index) {"," // Old index is the same as the new index, so just don't 
do"," // anything."," return;"," } else if (oldIndex < index) {"," // Removing the node from its old index will affect the new"," // index, so decrement the new index by one."," index -= 1;"," }"," }",""," this.removeNode(node, {"," silent: e.silent,"," src : 'add'"," });"," }",""," // Add the node to its new parent at the desired index."," node.parent = parent;"," parent.children.splice(index, 0, node);",""," parent.canHaveChildren = true;"," parent._isIndexStale = true;"," },",""," _defClearFn: function (e) {"," var newRootNode = e.rootNode;",""," if (this.rootNode) {"," this.destroyNode(this.rootNode, {silent: true});"," }",""," this._nodeMap = {};"," this._nodeMap[newRootNode.id] = newRootNode;",""," this.rootNode = newRootNode;"," this.children = newRootNode.children;"," },",""," _defRemoveFn: function (e) {"," var node = e.node;",""," if (e.destroy) {"," this.destroyNode(node, {silent: true});"," } else if (e.parent) {"," this._removeNodeFromParent(node);"," } else if (this.rootNode === node) {"," // Guess we'll need a new root node!"," this.rootNode = this.createNode(this._rootNodeConfig);"," this.children = this.rootNode.children;"," }"," }","}, {"," /**"," Return this value from a `Tree#traverseNode()` or `Tree.Node#traverse()`"," callback to immediately stop traversal.",""," @property STOP_TRAVERSAL"," @static"," **/"," STOP_TRAVERSAL: {}","});","","Y.Tree = Y.mix(Tree, Y.Tree);","","","}, '@VERSION@', {\"requires\": [\"base-build\", \"tree-node\"]});","","}());"]};
}
var __cov_E5qBLl7EORwMNPeXoqY$7g = __coverage__['build/tree/tree.js'];
__cov_E5qBLl7EORwMNPeXoqY$7g.s['1']++;YUI.add('tree',function(Y,NAME){__cov_E5qBLl7EORwMNPeXoqY$7g.f['1']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['2']++;var Lang=Y.Lang,EVT_ADD='add',EVT_CLEAR='clear',EVT_REMOVE='remove';__cov_E5qBLl7EORwMNPeXoqY$7g.s['3']++;var Tree=Y.Base.create('tree',Y.Base,[],{nodeClass:Y.Tree.Node,nodeExtensions:[],_isYUITree:true,_rootNodeConfig:{canHaveChildren:true},initializer:function(config){__cov_E5qBLl7EORwMNPeXoqY$7g.f['2']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['4']++;(__cov_E5qBLl7EORwMNPeXoqY$7g.b['1'][0]++,config)||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['1'][1]++,config={});__cov_E5qBLl7EORwMNPeXoqY$7g.s['5']++;if(config.nodeClass){__cov_E5qBLl7EORwMNPeXoqY$7g.b['2'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['6']++;this.nodeClass=config.nodeClass;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['2'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['7']++;if(config.nodeExtensions){__cov_E5qBLl7EORwMNPeXoqY$7g.b['3'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['8']++;this.nodeExtensions=this.nodeExtensions.concat(config.nodeExtensions);}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['3'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['9']++;(__cov_E5qBLl7EORwMNPeXoqY$7g.b['4'][0]++,this._published)||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['4'][1]++,this._published={});__cov_E5qBLl7EORwMNPeXoqY$7g.s['10']++;this._nodeMap={};__cov_E5qBLl7EORwMNPeXoqY$7g.s['11']++;this.onceAfter('initializedChange',function(){__cov_E5qBLl7EORwMNPeXoqY$7g.f['3']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['12']++;this._composeNodeClass();__cov_E5qBLl7EORwMNPeXoqY$7g.s['13']++;this.clear(config.rootNode,{silent:true});__cov_E5qBLl7EORwMNPeXoqY$7g.s['14']++;if(config.nodes){__cov_E5qBLl7EORwMNPeXoqY$7g.b['5'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['15']++;this.insertNode(this.rootNode,config.nodes,{silent:true});}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['5'][1]++;}});},destructor:function(){__cov_E5qBLl7EORwMNPeXoqY$7g.f['4']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['16']++;this.destroyNode(this.rootNode,{silent:true});__cov_E5qBLl7EORwMNPeXoqY$7g.s['17']++;this.chil
dren=null;__cov_E5qBLl7EORwMNPeXoqY$7g.s['18']++;this.rootNode=null;__cov_E5qBLl7EORwMNPeXoqY$7g.s['19']++;this._nodeClass=null;__cov_E5qBLl7EORwMNPeXoqY$7g.s['20']++;this._nodeMap=null;__cov_E5qBLl7EORwMNPeXoqY$7g.s['21']++;this._published=null;},appendNode:function(parent,node,options){__cov_E5qBLl7EORwMNPeXoqY$7g.f['5']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['22']++;return this.insertNode(parent,node,Y.merge(options,{index:parent.children.length,src:'append'}));},clear:function(rootNode,options){__cov_E5qBLl7EORwMNPeXoqY$7g.f['6']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['23']++;return this._fireTreeEvent(EVT_CLEAR,{rootNode:this.createNode((__cov_E5qBLl7EORwMNPeXoqY$7g.b['6'][0]++,rootNode)||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['6'][1]++,this._rootNodeConfig)),src:(__cov_E5qBLl7EORwMNPeXoqY$7g.b['7'][0]++,options)&&(__cov_E5qBLl7EORwMNPeXoqY$7g.b['7'][1]++,options.src)},{defaultFn:this._defClearFn,silent:(__cov_E5qBLl7EORwMNPeXoqY$7g.b['8'][0]++,options)&&(__cov_E5qBLl7EORwMNPeXoqY$7g.b['8'][1]++,options.silent)});},createNode:function(config){__cov_E5qBLl7EORwMNPeXoqY$7g.f['7']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['24']++;(__cov_E5qBLl7EORwMNPeXoqY$7g.b['9'][0]++,config)||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['9'][1]++,config={});__cov_E5qBLl7EORwMNPeXoqY$7g.s['25']++;if(config._isYUITreeNode){__cov_E5qBLl7EORwMNPeXoqY$7g.b['10'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['26']++;if(config.state.destroyed){__cov_E5qBLl7EORwMNPeXoqY$7g.b['11'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['27']++;Y.error('Cannot insert a node that has already been destroyed.',null,'tree');__cov_E5qBLl7EORwMNPeXoqY$7g.s['28']++;return null;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['11'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['29']++;this._adoptNode(config);__cov_E5qBLl7EORwMNPeXoqY$7g.s['30']++;return config;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['10'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['31']++;if(config.children){__cov_E5qBLl7EORwMNPeXoqY$7g.b['12'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['32']++;var 
children=[];__cov_E5qBLl7EORwMNPeXoqY$7g.s['33']++;for(var i=0,len=config.children.length;i<len;i++){__cov_E5qBLl7EORwMNPeXoqY$7g.s['34']++;children.push(this.createNode(config.children[i]));}__cov_E5qBLl7EORwMNPeXoqY$7g.s['35']++;config=Y.merge(config,{children:children});}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['12'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['36']++;var node=new this._nodeClass(this,config);__cov_E5qBLl7EORwMNPeXoqY$7g.s['37']++;return this._nodeMap[node.id]=node;},destroyNode:function(node,options){__cov_E5qBLl7EORwMNPeXoqY$7g.f['8']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['38']++;var child,i,len;__cov_E5qBLl7EORwMNPeXoqY$7g.s['39']++;(__cov_E5qBLl7EORwMNPeXoqY$7g.b['13'][0]++,options)||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['13'][1]++,options={});__cov_E5qBLl7EORwMNPeXoqY$7g.s['40']++;for(i=0,len=node.children.length;i<len;i++){__cov_E5qBLl7EORwMNPeXoqY$7g.s['41']++;child=node.children[i];__cov_E5qBLl7EORwMNPeXoqY$7g.s['42']++;child.parent=null;__cov_E5qBLl7EORwMNPeXoqY$7g.s['43']++;this.destroyNode(child,options);}__cov_E5qBLl7EORwMNPeXoqY$7g.s['44']++;if(node.parent){__cov_E5qBLl7EORwMNPeXoqY$7g.b['14'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['45']++;this.removeNode(node,options);}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['14'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['46']++;node.children=[];__cov_E5qBLl7EORwMNPeXoqY$7g.s['47']++;node.data={};__cov_E5qBLl7EORwMNPeXoqY$7g.s['48']++;node.state={destroyed:true};__cov_E5qBLl7EORwMNPeXoqY$7g.s['49']++;node.tree=null;__cov_E5qBLl7EORwMNPeXoqY$7g.s['50']++;node._indexMap={};__cov_E5qBLl7EORwMNPeXoqY$7g.s['51']++;delete this._nodeMap[node.id];__cov_E5qBLl7EORwMNPeXoqY$7g.s['52']++;return this;},emptyNode:function(node,options){__cov_E5qBLl7EORwMNPeXoqY$7g.f['9']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['53']++;var children=node.children,removed=[];__cov_E5qBLl7EORwMNPeXoqY$7g.s['54']++;for(var 
i=children.length-1;i>-1;--i){__cov_E5qBLl7EORwMNPeXoqY$7g.s['55']++;removed[i]=this.removeNode(children[i],options);}__cov_E5qBLl7EORwMNPeXoqY$7g.s['56']++;return removed;},findNode:function(node,options,callback,thisObj){__cov_E5qBLl7EORwMNPeXoqY$7g.f['10']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['57']++;var match=null;__cov_E5qBLl7EORwMNPeXoqY$7g.s['58']++;if(typeof options==='function'){__cov_E5qBLl7EORwMNPeXoqY$7g.b['15'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['59']++;thisObj=callback;__cov_E5qBLl7EORwMNPeXoqY$7g.s['60']++;callback=options;__cov_E5qBLl7EORwMNPeXoqY$7g.s['61']++;options={};}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['15'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['62']++;this.traverseNode(node,options,function(descendant){__cov_E5qBLl7EORwMNPeXoqY$7g.f['11']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['63']++;if(callback.call(thisObj,descendant)){__cov_E5qBLl7EORwMNPeXoqY$7g.b['16'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['64']++;match=descendant;__cov_E5qBLl7EORwMNPeXoqY$7g.s['65']++;return Tree.STOP_TRAVERSAL;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['16'][1]++;}});__cov_E5qBLl7EORwMNPeXoqY$7g.s['66']++;return match;},getNodeById:function(id){__cov_E5qBLl7EORwMNPeXoqY$7g.f['12']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['67']++;return this._nodeMap[id];},insertNode:function(parent,node,options){__cov_E5qBLl7EORwMNPeXoqY$7g.f['13']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['68']++;(__cov_E5qBLl7EORwMNPeXoqY$7g.b['17'][0]++,options)||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['17'][1]++,options={});__cov_E5qBLl7EORwMNPeXoqY$7g.s['69']++;(__cov_E5qBLl7EORwMNPeXoqY$7g.b['18'][0]++,parent)||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['18'][1]++,parent=this.rootNode);__cov_E5qBLl7EORwMNPeXoqY$7g.s['70']++;if((__cov_E5qBLl7EORwMNPeXoqY$7g.b['20'][0]++,'length'in node)&&(__cov_E5qBLl7EORwMNPeXoqY$7g.b['20'][1]++,Lang.isArray(node))){__cov_E5qBLl7EORwMNPeXoqY$7g.b['19'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['71']++;var hasIndex='index'in options,insertedNodes=[],insertedNode;__cov_E5qBLl7EORwMNPeXoqY$7g.s['72']++;for(var 
i=0,len=node.length;i<len;i++){__cov_E5qBLl7EORwMNPeXoqY$7g.s['73']++;insertedNode=this.insertNode(parent,node[i],options);__cov_E5qBLl7EORwMNPeXoqY$7g.s['74']++;if(insertedNode){__cov_E5qBLl7EORwMNPeXoqY$7g.b['21'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['75']++;insertedNodes.push(insertedNode);__cov_E5qBLl7EORwMNPeXoqY$7g.s['76']++;if(hasIndex){__cov_E5qBLl7EORwMNPeXoqY$7g.b['22'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['77']++;options.index+=1;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['22'][1]++;}}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['21'][1]++;}}__cov_E5qBLl7EORwMNPeXoqY$7g.s['78']++;return insertedNodes;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['19'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['79']++;node=this.createNode(node);__cov_E5qBLl7EORwMNPeXoqY$7g.s['80']++;if(node){__cov_E5qBLl7EORwMNPeXoqY$7g.b['23'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['81']++;var index=options.index;__cov_E5qBLl7EORwMNPeXoqY$7g.s['82']++;if(typeof index==='undefined'){__cov_E5qBLl7EORwMNPeXoqY$7g.b['24'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['83']++;index=this._getDefaultNodeIndex(parent,node,options);}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['24'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['84']++;this._fireTreeEvent(EVT_ADD,{index:index,node:node,parent:parent,src:(__cov_E5qBLl7EORwMNPeXoqY$7g.b['25'][0]++,options.src)||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['25'][1]++,'insert')},{defaultFn:this._defAddFn,silent:options.silent});}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['23'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['85']++;return node;},prependNode:function(parent,node,options){__cov_E5qBLl7EORwMNPeXoqY$7g.f['14']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['86']++;return 
this.insertNode(parent,node,Y.merge(options,{index:0,src:'prepend'}));},removeNode:function(node,options){__cov_E5qBLl7EORwMNPeXoqY$7g.f['15']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['87']++;(__cov_E5qBLl7EORwMNPeXoqY$7g.b['26'][0]++,options)||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['26'][1]++,options={});__cov_E5qBLl7EORwMNPeXoqY$7g.s['88']++;this._fireTreeEvent(EVT_REMOVE,{destroy:!!options.destroy,node:node,parent:node.parent,src:(__cov_E5qBLl7EORwMNPeXoqY$7g.b['27'][0]++,options.src)||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['27'][1]++,'remove')},{defaultFn:this._defRemoveFn,silent:options.silent});__cov_E5qBLl7EORwMNPeXoqY$7g.s['89']++;return node;},size:function(){__cov_E5qBLl7EORwMNPeXoqY$7g.f['16']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['90']++;return this.rootNode.size()+1;},toJSON:function(){__cov_E5qBLl7EORwMNPeXoqY$7g.f['17']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['91']++;return this.rootNode.toJSON();},traverseNode:function(node,options,callback,thisObj){__cov_E5qBLl7EORwMNPeXoqY$7g.f['18']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['92']++;if(node.state.destroyed){__cov_E5qBLl7EORwMNPeXoqY$7g.b['28'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['93']++;Y.error('Cannot traverse a node that has been destroyed.',null,'tree');__cov_E5qBLl7EORwMNPeXoqY$7g.s['94']++;return;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['28'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['95']++;if(typeof options==='function'){__cov_E5qBLl7EORwMNPeXoqY$7g.b['29'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['96']++;thisObj=callback;__cov_E5qBLl7EORwMNPeXoqY$7g.s['97']++;callback=options;__cov_E5qBLl7EORwMNPeXoqY$7g.s['98']++;options={};}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['29'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['99']++;(__cov_E5qBLl7EORwMNPeXoqY$7g.b['30'][0]++,options)||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['30'][1]++,options={});__cov_E5qBLl7EORwMNPeXoqY$7g.s['100']++;var stop=Tree.STOP_TRAVERSAL,unlimited=typeof 
options.depth==='undefined';__cov_E5qBLl7EORwMNPeXoqY$7g.s['101']++;if(callback.call(thisObj,node)===stop){__cov_E5qBLl7EORwMNPeXoqY$7g.b['31'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['102']++;return stop;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['31'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['103']++;var children=node.children;__cov_E5qBLl7EORwMNPeXoqY$7g.s['104']++;if((__cov_E5qBLl7EORwMNPeXoqY$7g.b['33'][0]++,unlimited)||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['33'][1]++,options.depth>0)){__cov_E5qBLl7EORwMNPeXoqY$7g.b['32'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['105']++;var childOptions=unlimited?(__cov_E5qBLl7EORwMNPeXoqY$7g.b['34'][0]++,options):(__cov_E5qBLl7EORwMNPeXoqY$7g.b['34'][1]++,{depth:options.depth-1});__cov_E5qBLl7EORwMNPeXoqY$7g.s['106']++;for(var i=0,len=children.length;i<len;i++){__cov_E5qBLl7EORwMNPeXoqY$7g.s['107']++;if(this.traverseNode(children[i],childOptions,callback,thisObj)===stop){__cov_E5qBLl7EORwMNPeXoqY$7g.b['35'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['108']++;return stop;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['35'][1]++;}}}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['32'][1]++;}},_adoptNode:function(node,options){__cov_E5qBLl7EORwMNPeXoqY$7g.f['19']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['109']++;var oldTree=node.tree;__cov_E5qBLl7EORwMNPeXoqY$7g.s['110']++;if(oldTree===this){__cov_E5qBLl7EORwMNPeXoqY$7g.b['36'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['111']++;return;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['36'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['112']++;for(var i=0,len=node.children.length;i<len;i++){__cov_E5qBLl7EORwMNPeXoqY$7g.s['113']++;this._adoptNode(node.children[i],{silent:true});}__cov_E5qBLl7EORwMNPeXoqY$7g.s['114']++;oldTree.removeNode(node,options);__cov_E5qBLl7EORwMNPeXoqY$7g.s['115']++;delete oldTree._nodeMap[node.id];__cov_E5qBLl7EORwMNPeXoqY$7g.s['116']++;if((__cov_E5qBLl7EORwMNPeXoqY$7g.b['38'][0]++,!(node instanceof 
this._nodeClass))||(__cov_E5qBLl7EORwMNPeXoqY$7g.b['38'][1]++,oldTree._nodeClass!==this._nodeClass)){__cov_E5qBLl7EORwMNPeXoqY$7g.b['37'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['117']++;node=this.createNode(node.toJSON());}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['37'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['118']++;node.tree=this;__cov_E5qBLl7EORwMNPeXoqY$7g.s['119']++;this._nodeMap[node.id]=node;},_composeNodeClass:function(){__cov_E5qBLl7EORwMNPeXoqY$7g.f['20']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['120']++;var nodeClass=this.nodeClass,nodeExtensions=this.nodeExtensions,composedClass;__cov_E5qBLl7EORwMNPeXoqY$7g.s['121']++;if(typeof nodeClass==='string'){__cov_E5qBLl7EORwMNPeXoqY$7g.b['39'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['122']++;nodeClass=Y.Object.getValue(Y,nodeClass.split('.'));__cov_E5qBLl7EORwMNPeXoqY$7g.s['123']++;if(nodeClass){__cov_E5qBLl7EORwMNPeXoqY$7g.b['40'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['124']++;this.nodeClass=nodeClass;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['40'][1]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['125']++;Y.error('Node class not found: '+nodeClass,null,'tree');__cov_E5qBLl7EORwMNPeXoqY$7g.s['126']++;return;}}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['39'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['127']++;if(!nodeExtensions.length){__cov_E5qBLl7EORwMNPeXoqY$7g.b['41'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['128']++;this._nodeClass=nodeClass;__cov_E5qBLl7EORwMNPeXoqY$7g.s['129']++;return;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['41'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['130']++;composedClass=function(){__cov_E5qBLl7EORwMNPeXoqY$7g.f['21']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['131']++;var extensions=composedClass._nodeExtensions;__cov_E5qBLl7EORwMNPeXoqY$7g.s['132']++;nodeClass.apply(this,arguments);__cov_E5qBLl7EORwMNPeXoqY$7g.s['133']++;for(var 
i=0,len=extensions.length;i<len;i++){__cov_E5qBLl7EORwMNPeXoqY$7g.s['134']++;extensions[i].apply(this,arguments);}};__cov_E5qBLl7EORwMNPeXoqY$7g.s['135']++;Y.extend(composedClass,nodeClass);__cov_E5qBLl7EORwMNPeXoqY$7g.s['136']++;for(var i=0,len=nodeExtensions.length;i<len;i++){__cov_E5qBLl7EORwMNPeXoqY$7g.s['137']++;Y.mix(composedClass.prototype,nodeExtensions[i].prototype,true);}__cov_E5qBLl7EORwMNPeXoqY$7g.s['138']++;composedClass._nodeExtensions=nodeExtensions;__cov_E5qBLl7EORwMNPeXoqY$7g.s['139']++;this._nodeClass=composedClass;},_fireTreeEvent:function(name,facade,options){__cov_E5qBLl7EORwMNPeXoqY$7g.f['22']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['140']++;if((__cov_E5qBLl7EORwMNPeXoqY$7g.b['43'][0]++,options)&&(__cov_E5qBLl7EORwMNPeXoqY$7g.b['43'][1]++,options.silent)){__cov_E5qBLl7EORwMNPeXoqY$7g.b['42'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['141']++;if(options.defaultFn){__cov_E5qBLl7EORwMNPeXoqY$7g.b['44'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['142']++;facade.silent=true;__cov_E5qBLl7EORwMNPeXoqY$7g.s['143']++;options.defaultFn.call(this,facade);}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['44'][1]++;}}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['42'][1]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['144']++;if((__cov_E5qBLl7EORwMNPeXoqY$7g.b['46'][0]++,options)&&(__cov_E5qBLl7EORwMNPeXoqY$7g.b['46'][1]++,options.defaultFn)&&(__cov_E5qBLl7EORwMNPeXoqY$7g.b['46'][2]++,!this._published[name])){__cov_E5qBLl7EORwMNPeXoqY$7g.b['45'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['145']++;this._published[name]=this.publish(name,{defaultFn:options.defaultFn});}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['45'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['146']++;this.fire(name,facade);}__cov_E5qBLl7EORwMNPeXoqY$7g.s['147']++;return this;},_getDefaultNodeIndex:function(parent){__cov_E5qBLl7EORwMNPeXoqY$7g.f['23']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['148']++;return parent.children.length;},_removeNodeFromParent:function(node){__cov_E5qBLl7EORwMNPeXoqY$7g.f['24']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['149']++;var 
parent=node.parent,index;__cov_E5qBLl7EORwMNPeXoqY$7g.s['150']++;if(parent){__cov_E5qBLl7EORwMNPeXoqY$7g.b['47'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['151']++;index=parent.indexOf(node);__cov_E5qBLl7EORwMNPeXoqY$7g.s['152']++;if(index>-1){__cov_E5qBLl7EORwMNPeXoqY$7g.b['48'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['153']++;var children=parent.children;__cov_E5qBLl7EORwMNPeXoqY$7g.s['154']++;if(index===children.length-1){__cov_E5qBLl7EORwMNPeXoqY$7g.b['49'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['155']++;children.pop();}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['49'][1]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['156']++;children.splice(index,1);__cov_E5qBLl7EORwMNPeXoqY$7g.s['157']++;parent._isIndexStale=true;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['158']++;node.parent=null;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['48'][1]++;}}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['47'][1]++;}},_defAddFn:function(e){__cov_E5qBLl7EORwMNPeXoqY$7g.f['25']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['159']++;var index=e.index,node=e.node,parent=e.parent,oldIndex;__cov_E5qBLl7EORwMNPeXoqY$7g.s['160']++;if(node.parent){__cov_E5qBLl7EORwMNPeXoqY$7g.b['50'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['161']++;if(node.parent===parent){__cov_E5qBLl7EORwMNPeXoqY$7g.b['51'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['162']++;oldIndex=parent.indexOf(node);__cov_E5qBLl7EORwMNPeXoqY$7g.s['163']++;if(oldIndex===index){__cov_E5qBLl7EORwMNPeXoqY$7g.b['52'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['164']++;return;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['52'][1]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['165']++;if(oldIndex<index){__cov_E5qBLl7EORwMNPeXoqY$7g.b['53'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['166']++;index-=1;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['53'][1]++;}}}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['51'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['167']++;this.removeNode(node,{silent:e.silent,src:'add'});}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['50'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['168']++;node.parent=parent;__cov_E5qBLl7EORwMNPeXoqY$7g.s['169']++;parent.children.splice(index,0,
node);__cov_E5qBLl7EORwMNPeXoqY$7g.s['170']++;parent.canHaveChildren=true;__cov_E5qBLl7EORwMNPeXoqY$7g.s['171']++;parent._isIndexStale=true;},_defClearFn:function(e){__cov_E5qBLl7EORwMNPeXoqY$7g.f['26']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['172']++;var newRootNode=e.rootNode;__cov_E5qBLl7EORwMNPeXoqY$7g.s['173']++;if(this.rootNode){__cov_E5qBLl7EORwMNPeXoqY$7g.b['54'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['174']++;this.destroyNode(this.rootNode,{silent:true});}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['54'][1]++;}__cov_E5qBLl7EORwMNPeXoqY$7g.s['175']++;this._nodeMap={};__cov_E5qBLl7EORwMNPeXoqY$7g.s['176']++;this._nodeMap[newRootNode.id]=newRootNode;__cov_E5qBLl7EORwMNPeXoqY$7g.s['177']++;this.rootNode=newRootNode;__cov_E5qBLl7EORwMNPeXoqY$7g.s['178']++;this.children=newRootNode.children;},_defRemoveFn:function(e){__cov_E5qBLl7EORwMNPeXoqY$7g.f['27']++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['179']++;var node=e.node;__cov_E5qBLl7EORwMNPeXoqY$7g.s['180']++;if(e.destroy){__cov_E5qBLl7EORwMNPeXoqY$7g.b['55'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['181']++;this.destroyNode(node,{silent:true});}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['55'][1]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['182']++;if(e.parent){__cov_E5qBLl7EORwMNPeXoqY$7g.b['56'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['183']++;this._removeNodeFromParent(node);}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['56'][1]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['184']++;if(this.rootNode===node){__cov_E5qBLl7EORwMNPeXoqY$7g.b['57'][0]++;__cov_E5qBLl7EORwMNPeXoqY$7g.s['185']++;this.rootNode=this.createNode(this._rootNodeConfig);__cov_E5qBLl7EORwMNPeXoqY$7g.s['186']++;this.children=this.rootNode.children;}else{__cov_E5qBLl7EORwMNPeXoqY$7g.b['57'][1]++;}}}}},{STOP_TRAVERSAL:{}});__cov_E5qBLl7EORwMNPeXoqY$7g.s['187']++;Y.Tree=Y.mix(Tree,Y.Tree);},'@VERSION@',{'requires':['base-build','tree-node']});
|
PypiClean
|
/casai-home-frontend-20220503.0.tar.gz/casai-home-frontend-20220503.0/hass_frontend/frontend_es5/7651b3d3.js
|
(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[60929],{22098:function(e,t,r){"use strict";var n,i,o,a,s=r(50424),c=r(55358);function l(e){return(l="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function f(e,t){return t||(t=e.slice(0)),Object.freeze(Object.defineProperties(e,{raw:{value:Object.freeze(t)}}))}function u(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function d(e,t){return(d=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e})(e,t)}function p(e){var t=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],(function(){}))),!0}catch(e){return!1}}();return function(){var r,n=y(e);if(t){var i=y(this).constructor;r=Reflect.construct(n,arguments,i)}else r=n.apply(this,arguments);return h(this,r)}}function h(e,t){return!t||"object"!==l(t)&&"function"!=typeof t?m(e):t}function m(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function y(e){return(y=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}function v(){v=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(n){t.forEach((function(t){var i=t.placement;if(t.kind===n&&("static"===i||"prototype"===i)){var 
o="static"===i?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var n=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===n?void 0:n.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],n=[],i={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,i)}),this),e.forEach((function(e){if(!g(e))return r.push(e);var t=this.decorateElement(e,i);r.push(t.element),r.push.apply(r,t.extras),n.push.apply(n,t.finishers)}),this),!t)return{elements:r,finishers:n};var o=this.decorateConstructor(r,t);return n.push.apply(n,o.finishers),o.finishers=n,o},addElementPlacement:function(e,t,r){var n=t[e.placement];if(!r&&-1!==n.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");n.push(e.key)},decorateElement:function(e,t){for(var r=[],n=[],i=e.decorators,o=i.length-1;o>=0;o--){var a=t[e.placement];a.splice(a.indexOf(e.key),1);var s=this.fromElementDescriptor(e),c=this.toElementFinisherExtras((0,i[o])(s)||s);e=c.element,this.addElementPlacement(e,t),c.finisher&&n.push(c.finisher);var l=c.extras;if(l){for(var f=0;f<l.length;f++)this.addElementPlacement(l[f],t);r.push.apply(r,l)}}return{element:e,finishers:n,extras:r}},decorateConstructor:function(e,t){for(var r=[],n=t.length-1;n>=0;n--){var i=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[n])(i)||i);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var a=0;a<e.length-1;a++)for(var s=a+1;s<e.length;s++)if(e[a].key===e[s].key&&e[a].placement===e[s].placement)throw new TypeError("Duplicated element ("+e[a].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return 
Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return P(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?P(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var r=x(e.key),n=String(e.placement);if("static"!==n&&"prototype"!==n&&"own"!==n)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+n+'"');var i=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:n,descriptor:Object.assign({},i)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(i,"get","The property descriptor of a field descriptor"),this.disallowProperty(i,"set","The property descriptor of a field descriptor"),this.disallowProperty(i,"value","The property descriptor of a field 
descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:E(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=E(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var n=(0,t[r])(e);if(void 0!==n){if("function"!=typeof n)throw new TypeError("Finishers must return a constructor.");e=n}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function b(e){var t,r=x(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var n={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(n.decorators=e.decorators),"field"===e.kind&&(n.initializer=e.value),n}function w(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function g(e){return e.decorators&&e.decorators.length}function k(e){return void 0!==e&&!(void 
0===e.value&&void 0===e.writable)}function E(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function x(e){var t=function(e,t){if("object"!==l(e)||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var n=r.call(e,t||"default");if("object"!==l(n))return n;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"===l(t)?t:String(t)}function P(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,n=new Array(t);r<t;r++)n[r]=e[r];return n}!function(e,t,r,n){var i=v();if(n)for(var o=0;o<n.length;o++)i=n[o](i);var a=t((function(e){i.initializeInstanceElements(e,s.elements)}),r),s=i.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},n=0;n<e.length;n++){var i,o=e[n];if("method"===o.kind&&(i=t.find(r)))if(k(o.descriptor)||k(i.descriptor)){if(g(o)||g(i))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");i.descriptor=o.descriptor}else{if(g(o)){if(g(i))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");i.decorators=o.decorators}w(o,i)}else t.push(o)}return t}(a.d.map(b)),e);i.initializeClassElements(a.F,s.elements),i.runClassFinishers(a.F,s.finishers)}([(0,c.Mo)("ha-card")],(function(e,t){return{F:function(t){!function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&d(e,t)}(n,t);var r=p(n);function n(){var t;u(this,n);for(var i=arguments.length,o=new Array(i),a=0;a<i;a++)o[a]=arguments[a];return t=r.call.apply(r,[this].concat(o)),e(m(t)),t}return n}(t),d:[{kind:"field",decorators:[(0,c.Cb)()],key:"header",value:void 
0},{kind:"field",decorators:[(0,c.Cb)({type:Boolean,reflect:!0})],key:"outlined",value:function(){return!1}},{kind:"get",static:!0,key:"styles",value:function(){return(0,s.iv)(n||(n=f(["\n :host {\n background: var(\n --ha-card-background,\n var(--card-background-color, white)\n );\n border-radius: var(--ha-card-border-radius, 4px);\n box-shadow: var(\n --ha-card-box-shadow,\n 0px 2px 1px -1px rgba(0, 0, 0, 0.2),\n 0px 1px 1px 0px rgba(0, 0, 0, 0.14),\n 0px 1px 3px 0px rgba(0, 0, 0, 0.12)\n );\n color: var(--primary-text-color);\n display: block;\n transition: all 0.3s ease-out;\n position: relative;\n }\n\n :host([outlined]) {\n box-shadow: none;\n border-width: var(--ha-card-border-width, 1px);\n border-style: solid;\n border-color: var(\n --ha-card-border-color,\n var(--divider-color, #e0e0e0)\n );\n }\n\n .card-header,\n :host ::slotted(.card-header) {\n color: var(--ha-card-header-color, --primary-text-color);\n font-family: var(--ha-card-header-font-family, inherit);\n font-size: var(--ha-card-header-font-size, 24px);\n letter-spacing: -0.012em;\n line-height: 48px;\n padding: 12px 16px 16px;\n display: block;\n margin-block-start: 0px;\n margin-block-end: 0px;\n font-weight: normal;\n }\n\n :host ::slotted(.card-content:not(:first-child)),\n slot:not(:first-child)::slotted(.card-content) {\n padding-top: 0px;\n margin-top: -8px;\n }\n\n :host ::slotted(.card-content) {\n padding: 16px;\n }\n\n :host ::slotted(.card-actions) {\n border-top: 1px solid var(--divider-color, #e8e8e8);\n padding: 5px 16px;\n }\n "])))}},{kind:"method",key:"render",value:function(){return(0,s.dy)(i||(i=f(["\n ","\n <slot></slot>\n "])),this.header?(0,s.dy)(o||(o=f(['<h1 class="card-header">',"</h1>"])),this.header):(0,s.dy)(a||(a=f([""]))))}}]}}),s.oi)},99282:function(e,t,r){"use strict";var n=r(52039);function i(e){return(i="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof 
Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function o(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function a(e,t){for(var r=0;r<t.length;r++){var n=t[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(e,n.key,n)}}function s(e,t,r){return(s="undefined"!=typeof Reflect&&Reflect.get?Reflect.get:function(e,t,r){var n=function(e,t){for(;!Object.prototype.hasOwnProperty.call(e,t)&&null!==(e=u(e)););return e}(e,t);if(n){var i=Object.getOwnPropertyDescriptor(n,t);return i.get?i.get.call(r):i.value}})(e,t,r||e)}function c(e,t){return(c=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e})(e,t)}function l(e){var t=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],(function(){}))),!0}catch(e){return!1}}();return function(){var r,n=u(e);if(t){var i=u(this).constructor;r=Reflect.construct(n,arguments,i)}else r=n.apply(this,arguments);return f(this,r)}}function f(e,t){return!t||"object"!==i(t)&&"function"!=typeof t?function(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}(e):t}function u(e){return(u=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}var d=function(e){!function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&c(e,t)}(f,e);var t,r,n,i=l(f);function f(){return o(this,f),i.apply(this,arguments)}return t=f,(r=[{key:"connectedCallback",value:function(){var 
e=this;s(u(f.prototype),"connectedCallback",this).call(this),setTimeout((function(){e.path="ltr"===window.getComputedStyle(e).direction?"M8.59,16.58L13.17,12L8.59,7.41L10,6L16,12L10,18L8.59,16.58Z":"M15.41,16.58L10.83,12L15.41,7.41L14,6L8,12L14,18L15.41,16.58Z"}),100)}}])&&a(t.prototype,r),n&&a(t,n),f}(n.C);customElements.define("ha-icon-next",d)},91810:function(e,t,r){"use strict";r.d(t,{YJ:function(){return n},ID:function(){return i},Kk:function(){return o},$c:function(){return a},WI:function(){return s},vY:function(){return c},uZ:function(){return l},cC:function(){return f},e8:function(){return u},Jl:function(){return d},Lm:function(){return p},ol:function(){return h},x1:function(){return m}});var n=["ProtocolInfo","Probe","WakeUp","ManufacturerSpecific1","NodeInfo","NodePlusInfo","ManufacturerSpecific2","Versions","Instances","Static","CacheLoad","Associations","Neighbors","Session","Dynamic","Configuration","Complete"],i=["driverAllNodesQueried","driverAllNodesQueriedSomeDead","driverAwakeNodesQueried"],o=["starting","started","Ready","driverReady"],a=["Offline","stopped","driverFailed","driverReset","driverRemoved","driverAllNodesOnFire"],s=function(e){if(e){var t=e.identifiers.find((function(e){return"ozw"===e[0]}));if(t){var r=t[1].split(".");return{node_id:parseInt(r[1]),ozw_instance:parseInt(r[0])}}}},c=function(e){return e.callWS({type:"ozw/get_instances"})},l=function(e,t){return e.callWS({type:"ozw/network_status",ozw_instance:t})},f=function(e,t){return e.callWS({type:"ozw/network_statistics",ozw_instance:t})},u=function(e,t){return e.callWS({type:"ozw/get_nodes",ozw_instance:t})},d=function(e,t,r){return e.callWS({type:"ozw/node_status",ozw_instance:t,node_id:r})},p=function(e,t,r){return e.callWS({type:"ozw/node_metadata",ozw_instance:t,node_id:r})},h=function(e,t,r){return e.callWS({type:"ozw/get_config_parameters",ozw_instance:t,node_id:r})},m=function(e){var t=!(arguments.length>1&&void 0!==arguments[1])||arguments[1];return 
e.callWS({type:"ozw/migrate_zwave",dry_run:t})}},88165:function(e,t,r){"use strict";var n,i,o=r(50424),a=r(55358),s=r(76666);function c(e){return(c="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function l(e,t){return t||(t=e.slice(0)),Object.freeze(Object.defineProperties(e,{raw:{value:Object.freeze(t)}}))}function f(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function u(e,t){return(u=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e})(e,t)}function d(e){var t=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],(function(){}))),!0}catch(e){return!1}}();return function(){var r,n=m(e);if(t){var i=m(this).constructor;r=Reflect.construct(n,arguments,i)}else r=n.apply(this,arguments);return p(this,r)}}function p(e,t){return!t||"object"!==c(t)&&"function"!=typeof t?h(e):t}function h(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function m(e){return(m=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}function y(){y=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(n){t.forEach((function(t){var i=t.placement;if(t.kind===n&&("static"===i||"prototype"===i)){var o="static"===i?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var 
r=t.descriptor;if("field"===t.kind){var n=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===n?void 0:n.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],n=[],i={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,i)}),this),e.forEach((function(e){if(!w(e))return r.push(e);var t=this.decorateElement(e,i);r.push(t.element),r.push.apply(r,t.extras),n.push.apply(n,t.finishers)}),this),!t)return{elements:r,finishers:n};var o=this.decorateConstructor(r,t);return n.push.apply(n,o.finishers),o.finishers=n,o},addElementPlacement:function(e,t,r){var n=t[e.placement];if(!r&&-1!==n.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");n.push(e.key)},decorateElement:function(e,t){for(var r=[],n=[],i=e.decorators,o=i.length-1;o>=0;o--){var a=t[e.placement];a.splice(a.indexOf(e.key),1);var s=this.fromElementDescriptor(e),c=this.toElementFinisherExtras((0,i[o])(s)||s);e=c.element,this.addElementPlacement(e,t),c.finisher&&n.push(c.finisher);var l=c.extras;if(l){for(var f=0;f<l.length;f++)this.addElementPlacement(l[f],t);r.push.apply(r,l)}}return{element:e,finishers:n,extras:r}},decorateConstructor:function(e,t){for(var r=[],n=t.length-1;n>=0;n--){var i=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[n])(i)||i);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var a=0;a<e.length-1;a++)for(var s=a+1;s<e.length;s++)if(e[a].key===e[s].key&&e[a].placement===e[s].placement)throw new TypeError("Duplicated element ("+e[a].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return 
e}(t)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return x(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?x(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var r=E(e.key),n=String(e.placement);if("static"!==n&&"prototype"!==n&&"own"!==n)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+n+'"');var i=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:n,descriptor:Object.assign({},i)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(i,"get","The property descriptor of a field descriptor"),this.disallowProperty(i,"set","The property descriptor of a field descriptor"),this.disallowProperty(i,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:k(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var 
t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=k(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var n=(0,t[r])(e);if(void 0!==n){if("function"!=typeof n)throw new TypeError("Finishers must return a constructor.");e=n}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function v(e){var t,r=E(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var n={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(n.decorators=e.decorators),"field"===e.kind&&(n.initializer=e.value),n}function b(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function w(e){return e.decorators&&e.decorators.length}function g(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function k(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function E(e){var t=function(e,t){if("object"!==c(e)||null===e)return 
e;var r=e[Symbol.toPrimitive];if(void 0!==r){var n=r.call(e,t||"default");if("object"!==c(n))return n;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"===c(t)?t:String(t)}function x(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,n=new Array(t);r<t;r++)n[r]=e[r];return n}!function(e,t,r,n){var i=y();if(n)for(var o=0;o<n.length;o++)i=n[o](i);var a=t((function(e){i.initializeInstanceElements(e,s.elements)}),r),s=i.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},n=0;n<e.length;n++){var i,o=e[n];if("method"===o.kind&&(i=t.find(r)))if(g(o.descriptor)||g(i.descriptor)){if(w(o)||w(i))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");i.descriptor=o.descriptor}else{if(w(o)){if(w(i))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");i.decorators=o.decorators}b(o,i)}else t.push(o)}return t}(a.d.map(v)),e);i.initializeClassElements(a.F,s.elements),i.runClassFinishers(a.F,s.finishers)}([(0,a.Mo)("ha-config-section")],(function(e,t){return{F:function(t){!function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&u(e,t)}(n,t);var r=d(n);function n(){var t;f(this,n);for(var i=arguments.length,o=new Array(i),a=0;a<i;a++)o[a]=arguments[a];return t=r.call.apply(r,[this].concat(o)),e(h(t)),t}return n}(t),d:[{kind:"field",decorators:[(0,a.Cb)()],key:"isWide",value:function(){return!1}},{kind:"field",decorators:[(0,a.Cb)({type:Boolean})],key:"vertical",value:function(){return!1}},{kind:"method",key:"render",value:function(){return(0,o.dy)(n||(n=l(['\n <div\n class="content ','"\n >\n <div class="header"><slot name="header"></slot></div>\n <div\n class="together layout ','"\n 
>\n <div class="intro"><slot name="introduction"></slot></div>\n <div class="panel flex-auto"><slot></slot></div>\n </div>\n </div>\n '])),(0,s.$)({narrow:!this.isWide}),(0,s.$)({narrow:!this.isWide,vertical:this.vertical||!this.isWide,horizontal:!this.vertical&&this.isWide}))}},{kind:"get",static:!0,key:"styles",value:function(){return(0,o.iv)(i||(i=l(["\n :host {\n display: block;\n }\n .content {\n padding: 28px 20px 0;\n max-width: 1040px;\n margin: 0 auto;\n }\n\n .layout {\n display: flex;\n }\n\n .horizontal {\n flex-direction: row;\n }\n\n .vertical {\n flex-direction: column;\n }\n\n .flex-auto {\n flex: 1 1 auto;\n }\n\n .header {\n font-family: var(--paper-font-headline_-_font-family);\n -webkit-font-smoothing: var(\n --paper-font-headline_-_-webkit-font-smoothing\n );\n font-size: var(--paper-font-headline_-_font-size);\n font-weight: var(--paper-font-headline_-_font-weight);\n letter-spacing: var(--paper-font-headline_-_letter-spacing);\n line-height: var(--paper-font-headline_-_line-height);\n opacity: var(--dark-primary-opacity);\n }\n\n .together {\n margin-top: 32px;\n }\n\n .intro {\n font-family: var(--paper-font-subhead_-_font-family);\n -webkit-font-smoothing: var(\n --paper-font-subhead_-_-webkit-font-smoothing\n );\n font-weight: var(--paper-font-subhead_-_font-weight);\n line-height: var(--paper-font-subhead_-_line-height);\n width: 100%;\n opacity: var(--dark-primary-opacity);\n font-size: 14px;\n padding-bottom: 20px;\n }\n\n .horizontal .intro {\n max-width: 400px;\n margin-right: 40px;\n }\n\n .panel {\n margin-top: -24px;\n }\n\n .panel ::slotted(*) {\n margin-top: 24px;\n display: block;\n }\n\n .narrow.content {\n max-width: 640px;\n }\n .narrow .together {\n margin-top: 20px;\n }\n .narrow .intro {\n padding-bottom: 20px;\n margin-right: 0;\n max-width: 500px;\n }\n "])))}}]}}),o.oi)},60929:function(e,t,r){"use strict";r.r(t),r.d(t,{ozwTabs:function(){return D}});r(53918),r(25782),r(89194);var 
n,i,o,a,s,c,l=r(50424),f=r(55358),u=r(83849),d=(r(22098),r(99282),r(91810)),p=(r(48811),r(15291),r(1359),r(11654));r(88165);function h(e){return(h="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function m(e,t,r,n,i,o,a){try{var s=e[o](a),c=s.value}catch(l){return void r(l)}s.done?t(c):Promise.resolve(c).then(n,i)}function y(e,t){return t||(t=e.slice(0)),Object.freeze(Object.defineProperties(e,{raw:{value:Object.freeze(t)}}))}function v(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function b(e,t){return(b=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e})(e,t)}function w(e){var t=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],(function(){}))),!0}catch(e){return!1}}();return function(){var r,n=E(e);if(t){var i=E(this).constructor;r=Reflect.construct(n,arguments,i)}else r=n.apply(this,arguments);return g(this,r)}}function g(e,t){return!t||"object"!==h(t)&&"function"!=typeof t?k(e):t}function k(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function E(e){return(E=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}function x(){x=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(n){t.forEach((function(t){var i=t.placement;if(t.kind===n&&("static"===i||"prototype"===i)){var 
o="static"===i?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var n=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===n?void 0:n.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],n=[],i={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,i)}),this),e.forEach((function(e){if(!O(e))return r.push(e);var t=this.decorateElement(e,i);r.push(t.element),r.push.apply(r,t.extras),n.push.apply(n,t.finishers)}),this),!t)return{elements:r,finishers:n};var o=this.decorateConstructor(r,t);return n.push.apply(n,o.finishers),o.finishers=n,o},addElementPlacement:function(e,t,r){var n=t[e.placement];if(!r&&-1!==n.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");n.push(e.key)},decorateElement:function(e,t){for(var r=[],n=[],i=e.decorators,o=i.length-1;o>=0;o--){var a=t[e.placement];a.splice(a.indexOf(e.key),1);var s=this.fromElementDescriptor(e),c=this.toElementFinisherExtras((0,i[o])(s)||s);e=c.element,this.addElementPlacement(e,t),c.finisher&&n.push(c.finisher);var l=c.extras;if(l){for(var f=0;f<l.length;f++)this.addElementPlacement(l[f],t);r.push.apply(r,l)}}return{element:e,finishers:n,extras:r}},decorateConstructor:function(e,t){for(var r=[],n=t.length-1;n>=0;n--){var i=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[n])(i)||i);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var a=0;a<e.length-1;a++)for(var s=a+1;s<e.length;s++)if(e[a].key===e[s].key&&e[a].placement===e[s].placement)throw new TypeError("Duplicated element ("+e[a].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return 
Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return A(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?A(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var r=C(e.key),n=String(e.placement);if("static"!==n&&"prototype"!==n&&"own"!==n)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+n+'"');var i=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:n,descriptor:Object.assign({},i)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(i,"get","The property descriptor of a field descriptor"),this.disallowProperty(i,"set","The property descriptor of a field descriptor"),this.disallowProperty(i,"value","The property descriptor of a field 
descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:z(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=z(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var n=(0,t[r])(e);if(void 0!==n){if("function"!=typeof n)throw new TypeError("Finishers must return a constructor.");e=n}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function P(e){var t,r=C(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var n={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(n.decorators=e.decorators),"field"===e.kind&&(n.initializer=e.value),n}function _(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function O(e){return e.decorators&&e.decorators.length}function S(e){return void 0!==e&&!(void 
0===e.value&&void 0===e.writable)}function z(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function C(e){var t=function(e,t){if("object"!==h(e)||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var n=r.call(e,t||"default");if("object"!==h(n))return n;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"===h(t)?t:String(t)}function A(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,n=new Array(t);r<t;r++)n[r]=e[r];return n}var D=[];!function(e,t,r,n){var i=x();if(n)for(var o=0;o<n.length;o++)i=n[o](i);var a=t((function(e){i.initializeInstanceElements(e,s.elements)}),r),s=i.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},n=0;n<e.length;n++){var i,o=e[n];if("method"===o.kind&&(i=t.find(r)))if(S(o.descriptor)||S(i.descriptor)){if(O(o)||O(i))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");i.descriptor=o.descriptor}else{if(O(o)){if(O(i))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");i.decorators=o.decorators}_(o,i)}else t.push(o)}return t}(a.d.map(P)),e);i.initializeClassElements(a.F,s.elements),i.runClassFinishers(a.F,s.finishers)}([(0,f.Mo)("ozw-config-dashboard")],(function(e,t){var r,h;return{F:function(t){!function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&b(e,t)}(n,t);var r=w(n);function n(){var t;v(this,n);for(var i=arguments.length,o=new Array(i),a=0;a<i;a++)o[a]=arguments[a];return t=r.call.apply(r,[this].concat(o)),e(k(t)),t}return n}(t),d:[{kind:"field",decorators:[(0,f.Cb)({type:Object})],key:"hass",value:void 
0},{kind:"field",decorators:[(0,f.Cb)({type:Object})],key:"route",value:void 0},{kind:"field",decorators:[(0,f.Cb)({type:Boolean})],key:"narrow",value:void 0},{kind:"field",decorators:[(0,f.Cb)({type:Boolean})],key:"isWide",value:void 0},{kind:"field",decorators:[(0,f.Cb)()],key:"configEntryId",value:void 0},{kind:"field",decorators:[(0,f.SB)()],key:"_instances",value:void 0},{kind:"method",key:"firstUpdated",value:function(){this._fetchData()}},{kind:"method",key:"render",value:function(){var e=this;return this._instances?0===this._instances.length?(0,l.dy)(i||(i=y(["<hass-error-screen\n .hass=","\n .error=","\n ></hass-error-screen>"])),this.hass,this.hass.localize("ui.panel.config.ozw.select_instance.none_found")):(0,l.dy)(o||(o=y(["\n <hass-tabs-subpage\n .hass=","\n .narrow=","\n .route=","\n .tabs=",'\n back-path="/config/integrations"\n >\n <ha-config-section .narrow='," .isWide=",'>\n <div slot="header">\n ','\n </div>\n\n <div slot="introduction">\n ',"\n </div>\n ","\n </ha-config-section>\n </hass-tabs-subpage>\n "])),this.hass,this.narrow,this.route,D,this.narrow,this.isWide,this.hass.localize("ui.panel.config.ozw.select_instance.header"),this.hass.localize("ui.panel.config.ozw.select_instance.introduction"),this._instances.length>0?(0,l.dy)(a||(a=y(["\n ","\n "])),this._instances.map((function(t){var r="unknown",n="M12,2A10,10 0 0,0 2,12A10,10 0 0,0 12,22A10,10 0 0,0 22,12A10,10 0 0,0 12,2Z";return d.ID.includes(t.Status)&&(r="online",n="M12 2C6.5 2 2 6.5 2 12S6.5 22 12 22 22 17.5 22 12 17.5 2 12 2M10 17L5 12L6.41 10.59L10 14.17L17.59 6.58L19 8L10 17Z"),d.Kk.includes(t.Status)&&(r="starting"),d.$c.includes(t.Status)&&(r="offline",n="M12,2C17.53,2 22,6.47 22,12C22,17.53 17.53,22 12,22C6.47,22 2,17.53 2,12C2,6.47 6.47,2 12,2M15.59,7L12,10.59L8.41,7L7,8.41L10.59,12L7,15.59L8.41,17L12,13.41L15.59,17L17,15.59L13.41,12L17,8.41L15.59,7Z"),(0,l.dy)(s||(s=y(['\n <ha-card>\n <a\n href="/config/ozw/network/','"\n aria-role="option"\n tabindex="-1"\n >\n 
<paper-icon-item>\n <ha-svg-icon .path=',' slot="item-icon">\n </ha-svg-icon>\n <paper-item-body>\n ',"\n ","\n <div secondary>\n <ha-svg-icon\n .path=",'\n class="network-status-icon ','"\n ></ha-svg-icon>\n ',"\n -\n ","<br />\n ","\n : ","<br />\n OZWDaemon "," (OpenZWave\n ",")\n </div>\n </paper-item-body>\n <ha-icon-next></ha-icon-next>\n </paper-icon-item>\n </a>\n </ha-card>\n "])),t.ozw_instance,"M16.3,10.58C13.14,10.58 10.6,13.13 10.6,16.28C10.6,19.43 13.15,22 16.3,22C19.45,22 22,19.43 22,16.28C22,13.13 19.45,10.58 16.3,10.58M18,19.08H13.19L15.81,15H13.31L14.4,13.23H19.18L16.63,17.28H19.18L18,19.08M16.3,3.93V2C8.41,2 2,8.42 2,16.31H3.92C3.94,9.46 9.5,3.93 16.3,3.93M16.3,7.74V5.82C10.5,5.82 5.81,10.53 5.81,16.31H7.73C7.75,11.58 11.59,7.74 16.3,7.74",e.hass.localize("ui.panel.config.ozw.common.instance"),t.ozw_instance,n,r,e.hass.localize("ui.panel.config.ozw.network_status."+r),e.hass.localize("ui.panel.config.ozw.network_status.details."+t.Status.toLowerCase()),e.hass.localize("ui.panel.config.ozw.common.controller"),t.getControllerPath,t.OZWDaemon_Version,t.OpenZWave_Version)}))):""):(0,l.dy)(n||(n=y(["<hass-loading-screen></hass-loading-screen>"])))}},{kind:"method",key:"_fetchData",value:(r=regeneratorRuntime.mark((function e(){return regeneratorRuntime.wrap((function(e){for(;;)switch(e.prev=e.next){case 0:return e.next=2,(0,d.vY)(this.hass);case 2:this._instances=e.sent,1===this._instances.length&&(0,u.c)("/config/ozw/network/".concat(this._instances[0].ozw_instance),{replace:!0});case 4:case"end":return e.stop()}}),e,this)})),h=function(){var e=this,t=arguments;return new Promise((function(n,i){var o=r.apply(e,t);function a(e){m(o,n,i,a,s,"next",e)}function s(e){m(o,n,i,a,s,"throw",e)}a(void 0)}))},function(){return h.apply(this,arguments)})},{kind:"get",static:!0,key:"styles",value:function(){return[p.Qx,(0,l.iv)(c||(c=y(['\n ha-card:last-child {\n margin-bottom: 24px;\n }\n ha-config-section {\n margin-top: -12px;\n }\n :host([narrow]) 
ha-config-section {\n margin-top: -20px;\n }\n ha-card {\n overflow: hidden;\n }\n ha-card a {\n text-decoration: none;\n color: var(--primary-text-color);\n }\n paper-item-body {\n margin: 16px 0;\n }\n a {\n text-decoration: none;\n color: var(--primary-text-color);\n position: relative;\n display: block;\n outline: 0;\n }\n ha-svg-icon.network-status-icon {\n height: 14px;\n width: 14px;\n }\n .online {\n color: green;\n }\n .starting {\n color: orange;\n }\n .offline {\n color: red;\n }\n ha-svg-icon,\n ha-icon-next {\n color: var(--secondary-text-color);\n }\n .iron-selected paper-item::before,\n a:not(.iron-selected):focus::before {\n position: absolute;\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n pointer-events: none;\n content: "";\n transition: opacity 15ms linear;\n will-change: opacity;\n }\n a:not(.iron-selected):focus::before {\n background-color: currentColor;\n opacity: var(--dark-divider-opacity);\n }\n .iron-selected paper-item:focus::before,\n .iron-selected:focus paper-item::before {\n opacity: 0.2;\n }\n '])))]}}]}}),l.oi)}}]);
//# sourceMappingURL=7651b3d3.js.map
|
PypiClean
|
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/request/AlipayFundCouponWufuCostassetsQueryRequest.py
|
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayFundCouponWufuCostassetsQueryModel import AlipayFundCouponWufuCostassetsQueryModel
class AlipayFundCouponWufuCostassetsQueryRequest(object):
    """Request wrapper for the ``alipay.fund.coupon.wufu.costassets.query`` API.

    Carries the business payload (``biz_model`` / ``biz_content``) together
    with the common gateway parameters (protocol version, terminal info,
    callback URLs, user-defined extras) and flattens them into the plain
    dict the Alipay OpenAPI client posts to the gateway.
    """

    def __init__(self, biz_model=None):
        self._biz_model = biz_model      # typed business payload object
        self._biz_content = None         # legacy payload slot (model or raw value)
        self._version = "1.0"            # gateway protocol version
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None          # server-to-server async callback URL
        self._return_url = None          # browser redirect URL
        self._udf_params = None          # user-defined extra text parameters
        self._need_encrypt = False       # whether biz_content should be encrypted

    @property
    def biz_model(self):
        """The business-model object serialised into ``biz_content`` on send."""
        return self._biz_model

    @biz_model.setter
    def biz_model(self, value):
        self._biz_model = value

    @property
    def biz_content(self):
        return self._biz_content

    @biz_content.setter
    def biz_content(self, value):
        # Accept a ready model instance, otherwise coerce (e.g. a dict)
        # through the model's from_alipay_dict factory.
        if isinstance(value, AlipayFundCouponWufuCostassetsQueryModel):
            self._biz_content = value
        else:
            self._biz_content = AlipayFundCouponWufuCostassetsQueryModel.from_alipay_dict(value)

    @property
    def version(self):
        return self._version

    @version.setter
    def version(self, value):
        self._version = value

    @property
    def terminal_type(self):
        return self._terminal_type

    @terminal_type.setter
    def terminal_type(self, value):
        self._terminal_type = value

    @property
    def terminal_info(self):
        return self._terminal_info

    @terminal_info.setter
    def terminal_info(self, value):
        self._terminal_info = value

    @property
    def prod_code(self):
        return self._prod_code

    @prod_code.setter
    def prod_code(self, value):
        self._prod_code = value

    @property
    def notify_url(self):
        return self._notify_url

    @notify_url.setter
    def notify_url(self, value):
        self._notify_url = value

    @property
    def return_url(self):
        return self._return_url

    @return_url.setter
    def return_url(self, value):
        self._return_url = value

    @property
    def udf_params(self):
        return self._udf_params

    @udf_params.setter
    def udf_params(self, value):
        # Non-dict assignments are silently ignored, keeping the prior value.
        if isinstance(value, dict):
            self._udf_params = value

    @property
    def need_encrypt(self):
        return self._need_encrypt

    @need_encrypt.setter
    def need_encrypt(self, value):
        self._need_encrypt = value

    def add_other_text_param(self, key, value):
        """Attach one extra key/value text parameter to the request."""
        if not self.udf_params:
            self.udf_params = dict()
        self.udf_params[key] = value

    def get_params(self):
        """Build and return the flat parameter dict sent to the gateway.

        ``biz_model`` takes priority and is JSON-encoded under the standard
        biz-content key; a separately supplied ``biz_content`` is emitted as
        well (serialised if it exposes ``to_alipay_dict``, otherwise raw).
        """
        params = {
            P_METHOD: 'alipay.fund.coupon.wufu.costassets.query',
            P_VERSION: self.version,
        }
        if self.biz_model:
            params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
        if self.biz_content:
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
            else:
                params['biz_content'] = self.biz_content
        # Optional scalar parameters: only emitted when set to a truthy value.
        for name, attr in (('terminal_type', self.terminal_type),
                           ('terminal_info', self.terminal_info),
                           ('prod_code', self.prod_code),
                           ('notify_url', self.notify_url),
                           ('return_url', self.return_url)):
            if attr:
                params[name] = attr
        if self.udf_params:
            params.update(self.udf_params)
        return params

    def get_multipart_params(self):
        """This API uploads no files; always returns an empty dict."""
        return dict()
|
PypiClean
|
/azure_mgmt_kubernetesconfiguration-3.0.0-py3-none-any.whl/azure/mgmt/kubernetesconfiguration/v2022_03_01/aio/operations/_flux_config_operation_status_operations.py
|
from typing import Any, Callable, Dict, Optional, TypeVar
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._flux_config_operation_status_operations import build_get_request
T = TypeVar("T")  # deserialized model type produced by an operation
# Optional response hook: called as cls(pipeline_response, deserialized, response_headers).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class FluxConfigOperationStatusOperations:
    """Operations for querying Flux configuration async-operation status.

    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.kubernetesconfiguration.v2022_03_01.aio.SourceControlConfigurationClient`'s
        :attr:`flux_config_operation_status` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs) -> None:
        # Dependencies are injected by the generated client — positionally in
        # the order (client, config, serializer, deserializer), or by keyword.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace_async
    async def get(
        self,
        resource_group_name: str,
        cluster_rp: str,
        cluster_resource_name: str,
        cluster_name: str,
        flux_configuration_name: str,
        operation_id: str,
        **kwargs: Any
    ) -> _models.OperationStatusResult:
        """Get Async Operation status.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService,
         Microsoft.Kubernetes, Microsoft.HybridContainerService. Required.
        :type cluster_rp: str
        :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters,
         connectedClusters, provisionedClusters. Required.
        :type cluster_resource_name: str
        :param cluster_name: The name of the kubernetes cluster. Required.
        :type cluster_name: str
        :param flux_configuration_name: Name of the Flux Configuration. Required.
        :type flux_configuration_name: str
        :param operation_id: operation Id. Required.
        :type operation_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: OperationStatusResult or the result of cls(response)
        :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_03_01.models.OperationStatusResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map "expected" HTTP error statuses to specific azure-core exception
        # types; caller-supplied error_map entries override these defaults.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        # Caller may override the service api-version via kwargs or query params.
        api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-03-01"))
        cls: ClsType[_models.OperationStatusResult] = kwargs.pop("cls", None)
        request = build_get_request(
            resource_group_name=resource_group_name,
            cluster_rp=cluster_rp,
            cluster_resource_name=cluster_resource_name,
            cluster_name=cluster_name,
            flux_configuration_name=flux_configuration_name,
            operation_id=operation_id,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # failsafe_deserialize never raises on a malformed error body.
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize("OperationStatusResult", pipeline_response)
        if cls:
            # Hand raw response + deserialized model to the caller's response hook.
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # ARM URL template consumed by build_get_request via self.get.metadata["url"].
    get.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/fluxConfigurations/{fluxConfigurationName}/operations/{operationId}"
    }
|
PypiClean
|
/dune_xt-2022.1.4.601-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/dune/xt/common/vtk/reader.py
|
from pathlib import Path
from xml.etree.ElementTree import fromstring
from collections import OrderedDict
from xmljson import BadgerFish
import vtk
from lxml import etree
def _read_collection(xml):
    """Read every timestep of a VTK collection into (timestep, data) pairs.

    :param xml: BadgerFish-style mapping of the parsed .pvd/collection file
    :return: list of (timestep, vtk output) tuples sorted by timestep

    Fixes two defects: a collection with a single ``DataSet`` child is mapped
    to a dict rather than a list, and timesteps were previously sorted as
    strings (lexicographically '10' < '2').
    """
    collection = xml['VTKFile']['Collection']
    files = collection['DataSet']
    if not isinstance(files, list):
        # BadgerFish collapses a single child element into a bare dict.
        files = [files]
    data = [(f['@timestep'], _read_single(f['@file'])) for f in files]
    # Sort numerically; timestep attributes may arrive as str, int or float.
    data.sort(key=lambda t: float(t[0]))
    return data
def _read_single(filename, vtk_type=None):
    """Read a single VTK file and return its surface geometry output.

    :param filename: path of the VTK file to read
    :param vtk_type: optional VTKFile type; detected from the file when None
    :return: output of a vtkGeometryFilter applied to the loaded grid
    :raises NotImplementedError: for any type other than UnstructuredGrid
    """
    vtk_type = vtk_type or _get_vtk_type(filename)
    if vtk_type == 'UnstructuredGrid':
        reader = vtk.vtkXMLUnstructuredGridReader()
    else:
        raise NotImplementedError(f"VTK Files of type {vtk_type} can not yet be processed")
    reader.SetFileName(filename)
    reader.Update()
    # Convert the unstructured grid into polygonal (surface) data.
    geometryFilter = vtk.vtkGeometryFilter()
    geometryFilter.SetInputData(reader.GetOutput())
    geometryFilter.Update()
    return geometryFilter.GetOutput()
def _get_collection_data(filename):
    """Parse a VTK collection (.pvd) file.

    :param filename: path of the collection file
    :return: tuple of (Path, BadgerFish-style mapping of the XML content)

    Fix: read via ``Path.read_bytes()`` so the file handle is closed
    deterministically; the previous ``open(path, 'rb').read()`` leaked the
    handle until garbage collection.
    """
    path = Path(filename)
    assert path.is_file()
    bf = BadgerFish(dict_type=OrderedDict)
    return path, bf.data(fromstring(path.read_bytes()))
def _get_vtk_type(path):
'''We use the incremental event emitting parser
here since we can expect to encounter appended binary data in the xml
which lxml cannot parse.
:param path: vtk file to peek into
:return: None if no VTKFile element found, ellse the type attribute of the VTKFile element
'''
parser = etree.XMLPullParser(events=('start',))
with open(path, 'rb') as xml:
for lines in xml.readlines():
parser.feed(lines)
for action, element in parser.read_events():
if element.tag == 'VTKFile':
return element.get('type')
return None
def read_vtkfile(filename):
    """Read a VTK file of any supported kind.

    Collections yield the full timestep-sorted list of (timestep, data)
    pairs; a single file yields ``[[0., data]]``.
    """
    vtk_type = _get_vtk_type(filename)
    if vtk_type != 'Collection':
        return [
            [0., _read_single(filename, vtk_type)],
        ]
    _path, xml = _get_collection_data(filename)
    return _read_collection(xml)
|
PypiClean
|
/ccc-coef-0.1.6.tar.gz/ccc-coef-0.1.6/README.md
|
# Clustermatch Correlation Coefficient (CCC)
[](https://github.com/greenelab/ccc/actions/workflows/pytest.yaml)
[](https://codecov.io/gh/greenelab/ccc)
[](https://doi.org/10.1101/2022.06.15.496326)
[](https://greenelab.github.io/ccc-manuscript/)
## Overview
The Clustermatch Correlation Coefficient (CCC) is a highly-efficient, next-generation not-only-linear correlation coefficient that can work on numerical and categorical data types.
This repository contains the code of CCC and instructions to install and use it.
It also has all the scripts/notebooks to run the analyses for the [manuscript](https://github.com/greenelab/ccc-manuscript), where we applied CCC on gene expression data.
## Installation
CCC is available as a PyPI (Python) package (`ccc-coef`). We tested CCC in Python 3.9+, but it should work on prior 3.x versions.
You can quickly test it by creating a conda environment and then install it with `pip`:
```bash
# ipython and pandas are used in the following examples, but they are not needed for CCC to work
conda create -y -n ccc-env python=3.9 ipython pandas
conda activate ccc-env
pip install ccc-coef
```
## Usage
Run `ipython` in your terminal:
```bash
$ ipython
Python 3.10.4 (main, Mar 31 2022, 08:41:55) [GCC 7.5.0]
Type 'copyright', 'credits' or 'license' for more information
IPython 8.3.0 -- An enhanced Interactive Python. Type '?' for help.
In [1]:
```
When computing the correlation coefficient on a pair of features, CCC supports `numpy.array` or `pandas.Series`.
This is an example with numerical data (you can copy/paste the entire lines below including `In [...]`):
```python
In [1]: import numpy as np
In [2]: import pandas as pd
In [3]: from ccc.coef import ccc
In [4]: random_feature1 = np.random.rand(1000)
In [5]: random_feature2 = np.random.rand(1000)
In [6]: ccc(random_feature1, random_feature2)
Out[6]: 0.0018815884476534295
In [7]: random_feature1 = pd.Series(random_feature1)
In [8]: random_feature2 = pd.Series(random_feature2)
In [9]: ccc(random_feature1, random_feature2)
Out[9]: 0.0018815884476534295
```
CCC always returns a value between zero (no relationship) and one (perfect relationship).
As we show in the manuscript, the distribution of CCC values is much more skewed than other coefficients like Pearson's or Spearman's.
A comparison between these coefficients should account for that.
You can also mix numerical and categorical data:
```python
In [10]: categories = np.array(["blue", "red", "green", "yellow"])
In [11]: categorical_random_feature1 = np.random.choice(categories, size=1000)
In [12]: categorical_random_feature2 = np.random.choice(categories, size=1000)
In [13]: categorical_random_feature2[:10]
Out[13]:
array(['yellow', 'red', 'red', 'yellow', 'blue', 'blue', 'red', 'yellow',
'green', 'blue'], dtype='<U6')
In [14]: ccc(categorical_random_feature1, categorical_random_feature2)
Out[14]: 0.0009263483455638076
In [15]: ccc(random_feature1, categorical_random_feature2)
Out[15]: 0.0015123522641692117
```
The first argument of `ccc` could also be a matrix, either as a `numpy.array` (features are in rows and objects in columns) or as a `pandas.DataFrame` (objects are in rows and features in columns).
In this case, `ccc` will compute the pairwise correlation across all features:
```python
In [16]: # with a numpy.array
In [17]: data = np.random.rand(10, 1000)
In [18]: c = ccc(data)
In [19]: c.shape
Out[19]: (45,)
In [20]: c[:10]
Out[20]:
array([0.00404461, 0.00185342, 0.00248847, 0.00232761, 0.00260786,
0.00121495, 0.00227679, 0.00099051, 0.00313611, 0.00323936])
In [21]: # with a pandas.DataFrame
In [22]: data_df = pd.DataFrame(data.T)
In [23]: c = ccc(data_df)
In [24]: c.shape
Out[24]: (45,)
In [25]: c[:10]
Out[25]:
array([0.00404461, 0.00185342, 0.00248847, 0.00232761, 0.00260786,
0.00121495, 0.00227679, 0.00099051, 0.00313611, 0.00323936])
```
If your data has a mix of numerical and categorical features, it's better to work on a `pandas.DataFrame`.
As an example, we load the [Titanic dataset](https://www.kaggle.com/c/titanic/data) (from [seaborn](https://github.com/mwaskom/seaborn-data/)'s repository):
```python
In [26]: titanic_url = "https://raw.githubusercontent.com/mwaskom/seaborn-data/master/raw/titanic.csv"
In [27]: titanic_df = pd.read_csv(titanic_url)
In [28]: titanic_df.shape
Out[28]: (891, 11)
In [29]: titanic_df.head()
Out[29]:
survived pclass name sex age sibsp parch ticket fare cabin embarked
0 0 3 Braund, Mr. Owen Harris male 22.0 1 0 A/5 21171 7.2500 NaN S
1 1 1 Cumings, Mrs. John Bradley (Florence Briggs Th... female 38.0 1 0 PC 17599 71.2833 C85 C
2 1 3 Heikkinen, Miss. Laina female 26.0 0 0 STON/O2. 3101282 7.9250 NaN S
3 1 1 Futrelle, Mrs. Jacques Heath (Lily May Peel) female 35.0 1 0 113803 53.1000 C123 S
4 0 3 Allen, Mr. William Henry male 35.0 0 0 373450 8.0500 NaN S
```
The Titanic dataset has missing values:
```python
In [30]: titanic_df.isna().sum()
Out[30]:
survived 0
pclass 0
name 0
sex 0
age 177
sibsp 0
parch 0
ticket 0
fare 0
cabin 687
embarked 2
dtype: int64
```
So we need some kind of preprocessing before moving on:
```python
In [31]: titanic_df = titanic_df.dropna(subset=["embarked"]).dropna(axis=1)
In [32]: titanic_df.shape
Out[32]: (889, 9)
```
Now we can run CCC on the dataset and get a correlation matrix across features:
```python
In [33]: ccc_corrs = ccc(titanic_df)
In [34]: from scipy.spatial.distance import squareform
In [35]: ccc_corrs = squareform(ccc_corrs)
In [36]: np.fill_diagonal(ccc_corrs, 1.0)
In [37]: ccc_corrs = pd.DataFrame(ccc_corrs, index=titanic_df.columns.tolist(), columns=titanic_df.columns.tolist())
In [38]: ccc_corrs.shape
Out[38]: (9, 9)
In [39]: with pd.option_context('display.float_format', '{:,.2f}'.format):
...: display(ccc_corrs)
survived pclass name sex sibsp parch ticket fare embarked
survived 1.00 0.12 0.00 0.32 0.04 0.05 0.00 0.07 0.05
pclass 0.12 1.00 0.00 0.04 0.02 0.01 0.00 0.33 0.01
name 0.00 0.00 1.00 0.00 0.00 0.00 0.00 0.00 0.00
sex 0.32 0.04 0.00 1.00 0.08 0.11 0.00 0.04 0.04
sibsp 0.04 0.02 0.00 0.08 1.00 0.29 0.00 0.23 0.00
parch 0.05 0.01 0.00 0.11 0.29 1.00 0.00 0.14 0.00
ticket 0.00 0.00 0.00 0.00 0.00 0.00 1.00 0.02 0.00
fare 0.07 0.33 0.00 0.04 0.23 0.14 0.02 1.00 0.03
embarked 0.05 0.01 0.00 0.04 0.00 0.00 0.00 0.03 1.00
```
The `ccc` function also has a `n_jobs` parameter that allows to control the number of CPU cores used.
Below we compute the pairwise correlation between 20 features across 1000 objects:
```python
In [40]: data = np.random.rand(20, 1000)
In [41]: %timeit ccc(data, n_jobs=1)
1.32 s ± 45.8 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)
In [42]: %timeit ccc(data, n_jobs=2)
771 ms ± 11 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)
```
## Reproducible research
Below we provide the steps to reproduce all the analyses in the CCC manuscript.
### Setup
To prepare the environment to run the analyses in the manuscript, follow the steps in [environment](environment/).
After completing those steps, you'll have the source code in this repository, a Python environment (either using a Docker image or creating your own conda environment) and the necessary data to run the analyses.
### Running code
All the analyses are written as Jupyter notebooks and stored in the folder `nbs/`.
All notebooks are organized by directories, such as `01_preprocessing`, with file names that indicate the order in which they should be run (notebooks that share the same numeric prefix can be run in parallel).
You can run the analyses either using the JupyterLab server and your browser, or from the command line using [papermill](https://papermill.readthedocs.io/en/latest/).
**Using the browser.** For example, let's say you want to run the preprocessing notebooks.
If you want to use your browser, you first need to start the JupyterLab server:
```bash
bash scripts/run_nbs_server.sh
```
and then go to http://127.0.0.1:8893/ and browse to `nbs/05_preprocessing`.
Then you need to run each notebook in order.
If you use the Docker image, the steps are very similar for any command, but you need to prepend the `scripts/run_docker.sh` script.
```bash
bash scripts/run_docker.sh \
bash scripts/run_nbs_server.sh --container-mode
```
Note that the port is different: http://127.0.0.1:8888/
**Using the command-line.** You can also run the notebooks using the command-line with papermill instead of going to the browser.
Using as example the same preprocessing notebooks, you can pick one of these commands to run all the preprocessing notebooks in order:
```bash
# using your own conda environment:
# requires GNU Parallel: https://www.gnu.org/software/parallel/
# To install in Ubuntu: apt install parallel
parallel \
-k \
--lb \
--halt 2 \
-j1 \
'bash nbs/run_nbs.sh {}' ::: nbs/05_preprocessing/*.ipynb
# using the Docker image:
bash scripts/run_docker.sh \
parallel \
-k \
--lb \
--halt 2 \
-j1 \
'bash nbs/run_nbs.sh {}' ::: nbs/05_preprocessing/*.ipynb
```
|
PypiClean
|
/avrzero-0.0.3.tar.gz/avrzero-0.0.3/.github/ISSUE_TEMPLATE/bug_report.md
|
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See the error
**Expected behavior**
A clear and concise description of what you expect to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem. Please only use screenshots for GUI instead of stack trace etc.
**Device (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.
|
PypiClean
|
/hat-trie-python-0.6.0.tar.gz/hat-trie-python-0.6.0/README.md
|
# hat-trie-python
Python wrapper for https://github.com/Tessil/hat-trie (MIT License)
It's still incomplete; not all functionality is supported yet. Sets are not implemented yet.
Tested with Python 2.7+ on Linux, Python 3.5+ on Windows, and Python 2.7+ on macOS. It should also work with Python 2.7 on Windows; however, it's tricky to compile.
## Install
- `pip install hattrie` (requires Cython and a C++11 compiler)
## Example usage:
```
from hattrie import HatTrieMap
htm = HatTrieMap()
htm[b"/foo"] = b"1"
htm[b"/foo/bar"] = b"2"
print(list(htm.longest_prefix(b"/foo"))) # returns [(b'/foo', b'1')]
print(list(htm.longest_prefix(b"/foo/baz"))) # returns [(b'/foo', b'1')]
print(list(htm.longest_prefix(b"/foo/bar/baz"))) # returns [(b'/foo/bar', b'2'), (b'/foo', b'1')]
print(list(htm.longest_prefix(b"/foo/bar/"))) # returns [(b'/foo/bar', b'2'), (b'/foo', b'1')]
print(list(htm.longest_prefix(b"/bar"))) # returns []
print(list(htm.longest_prefix(b""))) # returns []
```
Any Python object is supported as value, however only bytes are supported as keys.
|
PypiClean
|
/mb-commons-1.1.8.tar.gz/mb-commons-1.1.8/mb_commons/mongo.py
|
from __future__ import annotations
from collections import OrderedDict
from dataclasses import dataclass
from decimal import Decimal
from typing import Any, Generic, Optional, Tuple, Type, TypeVar, Union
from urllib.parse import urlparse
from bson import CodecOptions, Decimal128, ObjectId
from bson.codec_options import TypeCodec, TypeRegistry
from pydantic import BaseModel
from pymongo import ASCENDING, DESCENDING, IndexModel, MongoClient
from pymongo.collection import ReturnDocument
from pymongo.database import Database
from pymongo.results import DeleteResult, InsertManyResult, InsertOneResult, UpdateResult
class ObjectIdStr(str):
    """String-typed pydantic field that coerces bson ObjectId values to str."""

    @classmethod
    def __get_validators__(cls):
        # pydantic v1 hook: yields the validator callables for this custom type.
        yield cls.validate

    @classmethod
    def validate(cls, v):
        # Coerce any incoming value (e.g. a bson.ObjectId) to its string form.
        return str(v)
class PropertyBaseModel(BaseModel):
    """
    Workaround for serializing properties with pydantic until
    https://github.com/samuelcolvin/pydantic/issues/935
    is solved
    """
    @classmethod
    def get_properties(cls):
        # Collect names of @property attributes declared on the class,
        # excluding pydantic's own "__values__"/"fields" property internals.
        return [prop for prop in dir(cls) if isinstance(getattr(cls, prop), property) and prop not in ("__values__", "fields")]
    def dict(
        self,
        *,
        include=None,
        exclude=None,
        by_alias: bool = False,
        skip_defaults: bool = None,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
    ):
        """Serialize like BaseModel.dict(), additionally evaluating @property values.

        Accepts the same keyword arguments as pydantic's BaseModel.dict();
        include/exclude filters are applied to the property names as well.
        """
        # Delegate field serialization to pydantic first.
        attribs = super().dict(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            skip_defaults=skip_defaults,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
        )
        props = self.get_properties()
        # Include and exclude properties
        if include:
            props = [prop for prop in props if prop in include]
        if exclude:
            props = [prop for prop in props if prop not in exclude]
        # Update the attribute dict with the properties
        if props:
            attribs.update({prop: getattr(self, prop) for prop in props})
        return attribs
class MongoModel(PropertyBaseModel):
    """Base class for pydantic models persisted in MongoDB."""

    # Name of the backing MongoDB collection; must be set by subclasses.
    __collection__: str = ""
    # Optional JSON-schema validator installed on the collection.
    __validator__: Optional[dict] = None
    # Indexes to create: IndexModel instances or string shorthand like "!-field".
    __indexes__: list[Union[IndexModel, str]] = []

    def to_doc(self) -> dict:
        """Return a MongoDB document dict with ``id`` renamed to ``_id`` (when set)."""
        doc = self.dict()
        if doc["id"] is not None:
            doc["_id"] = doc["id"]
        del doc["id"]
        return doc

    @classmethod
    def init_collection(cls, database: Database) -> MongoCollection[T]:
        """Create a typed MongoCollection wrapper for this model on *database*."""
        return MongoCollection.init(database, cls)
class DecimalCodec(TypeCodec):
    """bson codec converting between Python Decimal and BSON Decimal128."""

    python_type = Decimal     # Python type this codec encodes
    bson_type = Decimal128    # BSON type this codec decodes

    def transform_python(self, value):
        # Encode a Decimal as Decimal128 for storage.
        return Decimal128(value)

    def transform_bson(self, value):
        # Decode a stored Decimal128 back into a Python Decimal.
        return value.to_decimal()
class MongoNotFoundError(Exception):
    """Raised when no document exists for the requested primary key."""

    def __init__(self, pk):
        super().__init__(f"mongo document not found: {pk}")
        # Keep the offending key available to handlers.
        self.pk = pk
@dataclass
class MongoConnection:
    """A MongoClient paired with the Database selected from the connection URL."""

    client: MongoClient
    database: Database

    @staticmethod
    def connect(url: str) -> MongoConnection:
        """Open a client for *url* and select the database named in its path."""
        client = MongoClient(url)
        name = MongoConnection.get_database_name_from_url(url)
        return MongoConnection(client=client, database=client[name])

    @staticmethod
    def get_database_name_from_url(db_url: str) -> str:
        """Return the database name: the URL path without its leading slash."""
        return urlparse(db_url).path[1:]
# Model type handled by a MongoCollection; always a MongoModel subclass.
T = TypeVar("T", bound=MongoModel)
# Sort spec: list of (field, direction) pairs, or "field" / "-field" shorthand.
SortType = Optional[Union[list[Tuple[str, int]], str]]
# A raw MongoDB query/update document.
QueryType = dict[str, Any]
# Primary-key types accepted by get()/update_by_id()/delete_by_id().
PKType = Union[str, ObjectIdStr, int]
class MongoCollection(Generic[T]):
    """Typed facade over a pymongo collection for one MongoModel subclass.

    Registers the Decimal<->Decimal128 codec, creates declared indexes,
    installs the optional schema validator, and converts raw documents into
    ``model_class`` instances on the way out.
    """

    def __init__(self, model: Type[T], database: Database, wrap_object_str_id: bool = True):
        if not model.__collection__:
            raise Exception("empty collection name")
        # Codec registry so Decimal fields round-trip as BSON Decimal128.
        codecs = CodecOptions(type_registry=TypeRegistry([c() for c in [DecimalCodec]]))
        self.collection = database.get_collection(model.__collection__, codecs)
        if model.__indexes__:
            # String shorthand (e.g. "!-field,other") is expanded to IndexModel.
            indexes = [parse_str_index_model(i) if isinstance(i, str) else i for i in model.__indexes__]
            self.collection.create_indexes(indexes)
        self.model_class = model
        # Wrap primary keys in ObjectId only for ObjectIdStr-keyed models.
        self.wrap_object_id = model.__fields__["id"].type_ == ObjectIdStr and wrap_object_str_id
        if model.__validator__:
            # if collection exists, update the validator in place via collMod
            if model.__collection__ in database.list_collection_names():
                query = [("collMod", model.__collection__), ("validator", model.__validator__)]
                res = database.command(OrderedDict(query))
                if "ok" not in res:
                    raise Exception("can't set schema validator")
            else:
                database.create_collection(model.__collection__, codec_options=codecs, validator=model.__validator__)

    def insert_one(self, doc: T) -> InsertOneResult:
        """Insert one model; its ``id`` field is stored as Mongo's ``_id``."""
        return self.collection.insert_one(doc.to_doc())

    def insert_many(self, docs: list[T], ordered=True) -> InsertManyResult:
        """Insert several models.

        Fix: serialize with ``to_doc()`` (as ``insert_one`` does) so the model
        ``id`` maps to ``_id``; the previous ``dict()`` call kept a stray
        ``id`` field and let Mongo generate an unrelated ``_id``.
        """
        return self.collection.insert_many([obj.to_doc() for obj in docs], ordered=ordered)

    def get_or_none(self, pk: PKType) -> Optional[T]:
        """Return the document with primary key *pk*, or None when absent."""
        res = self.collection.find_one({"_id": self._pk(pk)})
        if res:
            return self.model_class(**res)

    def get(self, pk: PKType) -> T:
        """Return the document with primary key *pk*.

        :raises MongoNotFoundError: when no such document exists.
        """
        res = self.get_or_none(pk)
        if not res:
            raise MongoNotFoundError(pk)
        return res

    def find(self, query: QueryType, sort: SortType = None, limit: int = 0) -> list[T]:
        """Return all matching documents as model instances (limit=0 means no limit)."""
        return [self.model_class(**d) for d in self.collection.find(query, sort=self._sort(sort), limit=limit)]

    def find_one(self, query: QueryType, sort: SortType = None) -> Optional[T]:
        """Return the first matching document, or None."""
        res = self.collection.find_one(query, sort=self._sort(sort))
        if res:
            return self.model_class(**res)

    def find_one_and_update(self, query: QueryType, update: QueryType) -> Optional[T]:
        """Atomically update the first match and return its post-update state."""
        res = self.collection.find_one_and_update(query, update, return_document=ReturnDocument.AFTER)
        if res:
            return self.model_class(**res)

    def find_by_id_and_update(self, pk: PKType, update: QueryType) -> Optional[T]:
        """``find_one_and_update`` keyed by primary key."""
        return self.find_one_and_update({"_id": self._pk(pk)}, update)

    def update_by_id(self, pk: PKType, update: QueryType) -> UpdateResult:
        """Apply *update* to the document with primary key *pk*."""
        return self.collection.update_one({"_id": self._pk(pk)}, update)

    def update_one(self, query: QueryType, update: QueryType) -> UpdateResult:
        """Apply *update* to the first document matching *query*."""
        return self.collection.update_one(query, update)

    def update_many(self, query: QueryType, update: QueryType, upsert=False) -> UpdateResult:
        """Apply *update* to all documents matching *query*."""
        return self.collection.update_many(query, update, upsert=upsert)

    def delete_many(self, query: QueryType) -> DeleteResult:
        """Delete all documents matching *query*."""
        return self.collection.delete_many(query)

    def delete_one(self, query: QueryType) -> DeleteResult:
        """Delete the first document matching *query*."""
        return self.collection.delete_one(query)

    def delete_by_id(self, pk: PKType) -> DeleteResult:
        """Delete the document with primary key *pk*."""
        return self.collection.delete_one({"_id": self._pk(pk)})

    def count(self, query: QueryType) -> int:
        """Count documents matching *query*."""
        return self.collection.count_documents(query)

    def exists(self, query: QueryType) -> bool:
        """Return True when at least one document matches *query*."""
        return self.collection.count_documents(query) > 0

    def drop_collection(self):
        """Drop the whole backing collection."""
        return self.collection.drop()

    def _pk(self, pk: PKType):
        # Convert string pks to ObjectId only for ObjectIdStr-keyed models.
        return ObjectId(pk) if self.wrap_object_id else pk

    @staticmethod
    def _sort(sort: SortType):
        # "field" -> ascending, "-field" -> descending; lists pass through.
        if isinstance(sort, str):
            if sort.startswith("-"):
                return [(sort[1:], -1)]
            return [(sort, 1)]
        return sort

    @staticmethod
    def init(database: Database, model_class: Type[T]) -> MongoCollection[T]:
        """Convenience constructor used by MongoModel.init_collection."""
        return MongoCollection(model_class, database)
def make_query(**kwargs) -> QueryType:
    """Build a Mongo query dict from keyword args, dropping falsy values."""
    return {key: value for key, value in kwargs.items() if value}
def parse_str_index_model(index: str) -> IndexModel:
    """Expand string index shorthand into a pymongo IndexModel.

    Syntax: a leading ``!`` marks the index unique; a leading ``-`` on a
    field requests descending order; several fields may be joined by commas,
    e.g. ``"!-created_at,name"``.
    """
    unique = index.startswith("!")
    index = index.removeprefix("!")

    def _key(part: str):
        direction = DESCENDING if part.startswith("-") else ASCENDING
        return (part.removeprefix("-"), direction)

    if "," in index:
        keys = [_key(part) for part in index.split(",")]
    else:
        keys = [_key(index)]
    if unique:
        return IndexModel(keys, unique=True)
    return IndexModel(keys)
|
PypiClean
|
/alibabacloud_vcs20200515-3.6.3-py3-none-any.whl/alibabacloud_vcs20200515/models.py
|
from Tea.model import TeaModel
from typing import List, Dict, Any
class AiotDevice(TeaModel):
    """An AIoT device record: network address, geo position and placement."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELDS = (
        ('ipaddr', 'IPAddr'),
        ('latitude', 'Latitude'),
        ('longitude', 'Longitude'),
        ('name', 'Name'),
        ('place', 'Place'),
        ('port', 'Port'),
    )

    def __init__(
        self,
        ipaddr: str = None,
        latitude: float = None,
        longitude: float = None,
        name: str = None,
        place: str = None,
        port: int = None,
    ):
        # IP address
        self.ipaddr = ipaddr
        # latitude
        self.latitude = latitude
        # longitude
        self.longitude = longitude
        # display name
        self.name = name
        # installation place
        self.place = place
        # port
        self.port = port

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddAiotDevicesRequestAiotDeviceList(TeaModel):
    """One device entry in an AddAiotDevices request."""

    # (python attribute, wire key) pairs driving to_map/from_map.
    _FIELDS = (
        ('corp_id', 'CorpId'),
        ('device_id', 'DeviceId'),
        ('ipaddr', 'IPAddr'),
        ('latitude', 'Latitude'),
        ('longitude', 'Longitude'),
        ('name', 'Name'),
        ('place', 'Place'),
        ('port', 'Port'),
    )

    def __init__(
        self,
        corp_id: str = None,
        device_id: str = None,
        ipaddr: str = None,
        latitude: float = None,
        longitude: float = None,
        name: str = None,
        place: str = None,
        port: int = None,
    ):
        self.corp_id = corp_id
        self.device_id = device_id
        self.ipaddr = ipaddr
        self.latitude = latitude
        self.longitude = longitude
        self.name = name
        self.place = place
        self.port = port

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddAiotDevicesRequest(TeaModel):
    """Request payload carrying the list of AIoT devices to register."""

    def __init__(
        self,
        aiot_device_list: List[AddAiotDevicesRequestAiotDeviceList] = None,
    ):
        self.aiot_device_list = aiot_device_list

    def validate(self):
        # Validate each nested device entry, when present.
        if self.aiot_device_list:
            for entry in self.aiot_device_list:
                if entry:
                    entry.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        devices = []
        if self.aiot_device_list is not None:
            devices = [entry.to_map() if entry else None for entry in self.aiot_device_list]
        return {'AiotDeviceList': devices}

    def from_map(self, m: dict = None):
        m = m or dict()
        self.aiot_device_list = []
        raw = m.get('AiotDeviceList')
        if raw is not None:
            self.aiot_device_list = [
                AddAiotDevicesRequestAiotDeviceList().from_map(item) for item in raw
            ]
        return self
class AddAiotDevicesShrinkRequest(TeaModel):
    """Shrink variant of AddAiotDevicesRequest: the device list is a JSON string."""

    def __init__(
        self,
        aiot_device_list_shrink: str = None,
    ):
        self.aiot_device_list_shrink = aiot_device_list_shrink

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        shrink = self.aiot_device_list_shrink
        return {} if shrink is None else {'AiotDeviceList': shrink}

    def from_map(self, m: dict = None):
        m = m or dict()
        shrink = m.get('AiotDeviceList')
        if shrink is not None:
            self.aiot_device_list_shrink = shrink
        return self
class AddAiotDevicesResponseBodyResultList(TeaModel):
    """Per-device outcome of an AddAiotDevices call: status code/message plus
    the server endpoint and credentials issued to the device."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_PAIRS = (
        ('code', 'Code'),
        ('device_id', 'DeviceId'),
        ('id', 'Id'),
        ('message', 'Message'),
        ('password', 'Password'),
        ('server_host', 'ServerHost'),
        ('server_host_internal', 'ServerHostInternal'),
        ('server_id', 'ServerId'),
        ('server_ip', 'ServerIp'),
        ('server_ip_internal', 'ServerIpInternal'),
        ('server_port', 'ServerPort'),
        ('server_wss_port', 'ServerWssPort'),
        ('user_id', 'UserId'),
    )

    def __init__(
        self,
        code: str = None,
        device_id: str = None,
        id: str = None,
        message: str = None,
        password: str = None,
        server_host: str = None,
        server_host_internal: str = None,
        server_id: str = None,
        server_ip: str = None,
        server_ip_internal: str = None,
        server_port: str = None,
        server_wss_port: str = None,
        user_id: str = None,
    ):
        self.code = code
        self.device_id = device_id
        self.id = id
        self.message = message
        self.password = password
        self.server_host = server_host
        self.server_host_internal = server_host_internal
        self.server_id = server_id
        self.server_ip = server_ip
        self.server_ip_internal = server_ip_internal
        self.server_port = server_port
        self.server_wss_port = server_wss_port
        self.user_id = user_id

    def validate(self):
        # No field-level constraints for this model.
        pass

    def to_map(self):
        """Serialize every non-None attribute under its wire key."""
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        for attr, key in self._FIELD_PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from wire-format dictionary *m*; returns self."""
        m = m or {}
        for attr, key in self._FIELD_PAIRS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddAiotDevicesResponseBody(TeaModel):
    """Response payload of AddAiotDevices: overall status plus one result
    entry per submitted device."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        result_list: List[AddAiotDevicesResponseBodyResultList] = None,
    ):
        self.code = code
        self.message = message
        # Id of the request.
        self.request_id = request_id
        self.result_list = result_list

    def validate(self):
        """Recursively validate each non-None result entry."""
        for entry in self.result_list or []:
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize this body; 'ResultList' is always emitted (possibly empty)."""
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        result['ResultList'] = [
            entry.to_map() if entry else None
            for entry in (self.result_list or [])
        ]
        return result

    def from_map(self, m: dict = None):
        """Populate this body from wire-format dictionary *m*; returns self."""
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.result_list = [
            AddAiotDevicesResponseBodyResultList().from_map(entry)
            for entry in (m.get('ResultList') or [])
        ]
        return self
class AddAiotDevicesResponse(TeaModel):
    """HTTP-level wrapper for AddAiotDevices: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddAiotDevicesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = AddAiotDevicesResponseBody().from_map(m['body'])
        return self
class AddAiotPersonTableRequestPersonTable(TeaModel):
    """Definition of a person table to create: name, id, type and its
    verification-model list (a plain list of ints, not nested models)."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_PAIRS = (
        ('name', 'Name'),
        ('person_table_id', 'PersonTableId'),
        ('type', 'Type'),
        ('verification_model_list', 'VerificationModelList'),
    )

    def __init__(
        self,
        name: str = None,
        person_table_id: str = None,
        type: int = None,
        verification_model_list: List[int] = None,
    ):
        self.name = name
        self.person_table_id = person_table_id
        self.type = type
        self.verification_model_list = verification_model_list

    def validate(self):
        # No field-level constraints for this model.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        for attr, key in self._FIELD_PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_PAIRS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddAiotPersonTableRequest(TeaModel):
    """Request to create a person table under the instance identified by Id."""

    def __init__(
        self,
        id: str = None,
        person_table: AddAiotPersonTableRequestPersonTable = None,
    ):
        self.id = id
        self.person_table = person_table

    def validate(self):
        """Delegate validation to the nested person-table model, if present."""
        if self.person_table:
            self.person_table.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        if self.id is not None:
            result['Id'] = self.id
        if self.person_table is not None:
            result['PersonTable'] = self.person_table.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Id') is not None:
            self.id = m.get('Id')
        if m.get('PersonTable') is not None:
            self.person_table = AddAiotPersonTableRequestPersonTable().from_map(m['PersonTable'])
        return self
class AddAiotPersonTableResponseBody(TeaModel):
    """Response payload of AddAiotPersonTable: status plus the id of the
    created person table."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_PAIRS = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('person_table_id', 'PersonTableId'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        person_table_id: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        self.person_table_id = person_table_id
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        # No field-level constraints for this model.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        for attr, key in self._FIELD_PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_PAIRS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddAiotPersonTableResponse(TeaModel):
    """HTTP-level wrapper for AddAiotPersonTable: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddAiotPersonTableResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = AddAiotPersonTableResponseBody().from_map(m['body'])
        return self
class AddAiotPersonTableItemsRequestPersonTableItemListIdentificationList(TeaModel):
    """A single identification document (type code + number) for a person entry."""

    def __init__(
        self,
        number: str = None,
        type: int = None,
    ):
        self.number = number
        self.type = type

    def validate(self):
        # No field-level constraints for this model.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        if self.number is not None:
            result['Number'] = self.number
        if self.type is not None:
            result['Type'] = self.type
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        number = m.get('Number')
        if number is not None:
            self.number = number
        doc_type = m.get('Type')
        if doc_type is not None:
            self.type = doc_type
        return self
class AddAiotPersonTableItemsRequestPersonTableItemListImageListFeatureInfo(TeaModel):
    """Pre-extracted feature vector attached to a face image, together with the
    algorithm/vendor metadata needed to interpret it."""

    # (python attribute, wire key) pairs, in serialization order.
    # NOTE(review): 'tab_ied'/'TabIed' looks like a generator typo for
    # "TableId", but the wire key must be preserved as-is.
    _FIELD_PAIRS = (
        ('algorithm_type', 'AlgorithmType'),
        ('algorithm_version', 'AlgorithmVersion'),
        ('feature_data', 'FeatureData'),
        ('image_id', 'ImageId'),
        ('object_id', 'ObjectId'),
        ('tab_ied', 'TabIed'),
        ('vendor', 'Vendor'),
    )

    def __init__(
        self,
        algorithm_type: str = None,
        algorithm_version: str = None,
        feature_data: str = None,
        image_id: str = None,
        object_id: str = None,
        tab_ied: str = None,
        vendor: str = None,
    ):
        self.algorithm_type = algorithm_type
        self.algorithm_version = algorithm_version
        self.feature_data = feature_data
        self.image_id = image_id
        self.object_id = object_id
        self.tab_ied = tab_ied
        self.vendor = vendor

    def validate(self):
        # No field-level constraints for this model.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        for attr, key in self._FIELD_PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_PAIRS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddAiotPersonTableItemsRequestPersonTableItemListImageList(TeaModel):
    """One face image attached to a person entry; may carry a pre-extracted
    feature vector in the nested FeatureInfo model."""

    # (python attribute, wire key) pairs, in serialization order.
    # 'feature_info' is the only nested model and is special-cased below.
    _FIELD_PAIRS = (
        ('data', 'Data'),
        ('device_id', 'DeviceId'),
        ('event_sort', 'EventSort'),
        ('feature_info', 'FeatureInfo'),
        ('file_format', 'FileFormat'),
        ('height', 'Height'),
        ('image_id', 'ImageId'),
        ('shot_time', 'ShotTime'),
        ('size', 'Size'),
        ('storage_path', 'StoragePath'),
        ('type', 'Type'),
        ('width', 'Width'),
    )

    def __init__(
        self,
        data: str = None,
        device_id: str = None,
        event_sort: str = None,
        feature_info: AddAiotPersonTableItemsRequestPersonTableItemListImageListFeatureInfo = None,
        file_format: str = None,
        height: int = None,
        image_id: str = None,
        shot_time: str = None,
        size: int = None,
        storage_path: str = None,
        type: str = None,
        width: int = None,
    ):
        self.data = data
        self.device_id = device_id
        self.event_sort = event_sort
        self.feature_info = feature_info
        self.file_format = file_format
        self.height = height
        self.image_id = image_id
        self.shot_time = shot_time
        self.size = size
        self.storage_path = storage_path
        self.type = type
        self.width = width

    def validate(self):
        """Delegate validation to the nested feature model, if present."""
        if self.feature_info:
            self.feature_info.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        for attr, key in self._FIELD_PAIRS:
            value = getattr(self, attr)
            if value is None:
                continue
            result[key] = value.to_map() if attr == 'feature_info' else value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_PAIRS:
            value = m.get(key)
            if value is None:
                continue
            if attr == 'feature_info':
                self.feature_info = AddAiotPersonTableItemsRequestPersonTableItemListImageListFeatureInfo().from_map(value)
            else:
                setattr(self, attr, value)
        return self
class AddAiotPersonTableItemsRequestPersonTableItemList(TeaModel):
    """One person entry to add: identity documents, face images and the
    person's descriptive fields."""

    def __init__(
        self,
        identification_list: List[AddAiotPersonTableItemsRequestPersonTableItemListIdentificationList] = None,
        identification_num: int = None,
        image_list: List[AddAiotPersonTableItemsRequestPersonTableItemListImageList] = None,
        image_num: int = None,
        person_code: str = None,
        person_id: str = None,
        person_name: str = None,
        remarks: str = None,
    ):
        self.identification_list = identification_list
        self.identification_num = identification_num
        self.image_list = image_list
        self.image_num = image_num
        self.person_code = person_code
        self.person_id = person_id
        self.person_name = person_name
        self.remarks = remarks

    def validate(self):
        """Recursively validate every nested document and image, skipping Nones."""
        for doc in self.identification_list or []:
            if doc:
                doc.validate()
        for img in self.image_list or []:
            if img:
                img.validate()

    def to_map(self):
        """Serialize this entry; both list keys are always present (possibly empty)."""
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        result['IdentificationList'] = [
            doc.to_map() if doc else None
            for doc in (self.identification_list or [])
        ]
        if self.identification_num is not None:
            result['IdentificationNum'] = self.identification_num
        result['ImageList'] = [
            img.to_map() if img else None
            for img in (self.image_list or [])
        ]
        if self.image_num is not None:
            result['ImageNum'] = self.image_num
        if self.person_code is not None:
            result['PersonCode'] = self.person_code
        if self.person_id is not None:
            result['PersonId'] = self.person_id
        if self.person_name is not None:
            result['PersonName'] = self.person_name
        if self.remarks is not None:
            result['Remarks'] = self.remarks
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        self.identification_list = [
            AddAiotPersonTableItemsRequestPersonTableItemListIdentificationList().from_map(entry)
            for entry in (m.get('IdentificationList') or [])
        ]
        if m.get('IdentificationNum') is not None:
            self.identification_num = m.get('IdentificationNum')
        self.image_list = [
            AddAiotPersonTableItemsRequestPersonTableItemListImageList().from_map(entry)
            for entry in (m.get('ImageList') or [])
        ]
        if m.get('ImageNum') is not None:
            self.image_num = m.get('ImageNum')
        if m.get('PersonCode') is not None:
            self.person_code = m.get('PersonCode')
        if m.get('PersonId') is not None:
            self.person_id = m.get('PersonId')
        if m.get('PersonName') is not None:
            self.person_name = m.get('PersonName')
        if m.get('Remarks') is not None:
            self.remarks = m.get('Remarks')
        return self
class AddAiotPersonTableItemsRequest(TeaModel):
    """Request adding a batch of person entries to one person table of one instance."""

    def __init__(
        self,
        id: str = None,
        person_table_id: str = None,
        person_table_item_list: List[AddAiotPersonTableItemsRequestPersonTableItemList] = None,
    ):
        self.id = id
        self.person_table_id = person_table_id
        self.person_table_item_list = person_table_item_list

    def validate(self):
        """Recursively validate every person entry, skipping Nones."""
        for item in self.person_table_item_list or []:
            if item:
                item.validate()

    def to_map(self):
        """Serialize this request; 'PersonTableItemList' is always present."""
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        if self.id is not None:
            result['Id'] = self.id
        if self.person_table_id is not None:
            result['PersonTableId'] = self.person_table_id
        result['PersonTableItemList'] = [
            item.to_map() if item else None
            for item in (self.person_table_item_list or [])
        ]
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Id') is not None:
            self.id = m.get('Id')
        if m.get('PersonTableId') is not None:
            self.person_table_id = m.get('PersonTableId')
        self.person_table_item_list = [
            AddAiotPersonTableItemsRequestPersonTableItemList().from_map(entry)
            for entry in (m.get('PersonTableItemList') or [])
        ]
        return self
class AddAiotPersonTableItemsResponseBodyResultList(TeaModel):
    """Per-entry outcome of AddAiotPersonTableItems: status plus the id of the
    created person-table item."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_PAIRS = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('person_table_item_id', 'PersonTableItemId'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        person_table_item_id: str = None,
    ):
        self.code = code
        self.message = message
        self.person_table_item_id = person_table_item_id

    def validate(self):
        # No field-level constraints for this model.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        for attr, key in self._FIELD_PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_PAIRS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddAiotPersonTableItemsResponseBody(TeaModel):
    """Response payload of AddAiotPersonTableItems: overall status plus one
    result entry per submitted person."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        result_list: List[AddAiotPersonTableItemsResponseBodyResultList] = None,
    ):
        self.code = code
        self.message = message
        # Id of the request.
        self.request_id = request_id
        self.result_list = result_list

    def validate(self):
        """Recursively validate each non-None result entry."""
        for entry in self.result_list or []:
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize this body; 'ResultList' is always emitted (possibly empty)."""
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        result['ResultList'] = [
            entry.to_map() if entry else None
            for entry in (self.result_list or [])
        ]
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        self.result_list = [
            AddAiotPersonTableItemsResponseBodyResultList().from_map(entry)
            for entry in (m.get('ResultList') or [])
        ]
        return self
class AddAiotPersonTableItemsResponse(TeaModel):
    """HTTP-level wrapper for AddAiotPersonTableItems: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddAiotPersonTableItemsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = AddAiotPersonTableItemsResponseBody().from_map(m['body'])
        return self
class AddAiotPersonTableItemsForTablesRequestPersonTableItemListIdentificationList(TeaModel):
    """A single identification document (type code + number) for a person entry."""

    def __init__(
        self,
        number: str = None,
        type: int = None,
    ):
        self.number = number
        self.type = type

    def validate(self):
        # No field-level constraints for this model.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        if self.number is not None:
            result['Number'] = self.number
        if self.type is not None:
            result['Type'] = self.type
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        number = m.get('Number')
        if number is not None:
            self.number = number
        doc_type = m.get('Type')
        if doc_type is not None:
            self.type = doc_type
        return self
class AddAiotPersonTableItemsForTablesRequestPersonTableItemListImageListFeatureInfo(TeaModel):
    """Pre-extracted feature vector attached to a face image, together with the
    algorithm/vendor metadata needed to interpret it."""

    # (python attribute, wire key) pairs, in serialization order.
    # NOTE(review): 'tab_ied'/'TabIed' looks like a generator typo for
    # "TableId", but the wire key must be preserved as-is.
    _FIELD_PAIRS = (
        ('algorithm_type', 'AlgorithmType'),
        ('algorithm_version', 'AlgorithmVersion'),
        ('feature_data', 'FeatureData'),
        ('image_id', 'ImageId'),
        ('object_id', 'ObjectId'),
        ('tab_ied', 'TabIed'),
        ('vendor', 'Vendor'),
    )

    def __init__(
        self,
        algorithm_type: str = None,
        algorithm_version: str = None,
        feature_data: str = None,
        image_id: str = None,
        object_id: str = None,
        tab_ied: str = None,
        vendor: str = None,
    ):
        self.algorithm_type = algorithm_type
        self.algorithm_version = algorithm_version
        self.feature_data = feature_data
        self.image_id = image_id
        self.object_id = object_id
        self.tab_ied = tab_ied
        self.vendor = vendor

    def validate(self):
        # No field-level constraints for this model.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        for attr, key in self._FIELD_PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_PAIRS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddAiotPersonTableItemsForTablesRequestPersonTableItemListImageList(TeaModel):
    """One face image attached to a person entry; may carry a pre-extracted
    feature vector in the nested FeatureInfo model."""

    # (python attribute, wire key) pairs, in serialization order.
    # 'feature_info' is the only nested model and is special-cased below.
    _FIELD_PAIRS = (
        ('data', 'Data'),
        ('device_id', 'DeviceId'),
        ('event_sort', 'EventSort'),
        ('feature_info', 'FeatureInfo'),
        ('file_format', 'FileFormat'),
        ('height', 'Height'),
        ('image_id', 'ImageId'),
        ('shot_time', 'ShotTime'),
        ('size', 'Size'),
        ('storage_path', 'StoragePath'),
        ('type', 'Type'),
        ('width', 'Width'),
    )

    def __init__(
        self,
        data: str = None,
        device_id: str = None,
        event_sort: str = None,
        feature_info: AddAiotPersonTableItemsForTablesRequestPersonTableItemListImageListFeatureInfo = None,
        file_format: str = None,
        height: int = None,
        image_id: str = None,
        shot_time: str = None,
        size: int = None,
        storage_path: str = None,
        type: str = None,
        width: int = None,
    ):
        self.data = data
        self.device_id = device_id
        self.event_sort = event_sort
        self.feature_info = feature_info
        self.file_format = file_format
        self.height = height
        self.image_id = image_id
        self.shot_time = shot_time
        self.size = size
        self.storage_path = storage_path
        self.type = type
        self.width = width

    def validate(self):
        """Delegate validation to the nested feature model, if present."""
        if self.feature_info:
            self.feature_info.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        for attr, key in self._FIELD_PAIRS:
            value = getattr(self, attr)
            if value is None:
                continue
            result[key] = value.to_map() if attr == 'feature_info' else value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_PAIRS:
            value = m.get(key)
            if value is None:
                continue
            if attr == 'feature_info':
                self.feature_info = AddAiotPersonTableItemsForTablesRequestPersonTableItemListImageListFeatureInfo().from_map(value)
            else:
                setattr(self, attr, value)
        return self
class AddAiotPersonTableItemsForTablesRequestPersonTableItemList(TeaModel):
    """One person entry to add: identity documents, face images and the
    person's descriptive fields."""

    def __init__(
        self,
        identification_list: List[AddAiotPersonTableItemsForTablesRequestPersonTableItemListIdentificationList] = None,
        identification_num: int = None,
        image_list: List[AddAiotPersonTableItemsForTablesRequestPersonTableItemListImageList] = None,
        image_num: int = None,
        person_code: str = None,
        person_id: str = None,
        person_name: str = None,
        remarks: str = None,
    ):
        self.identification_list = identification_list
        self.identification_num = identification_num
        self.image_list = image_list
        self.image_num = image_num
        self.person_code = person_code
        self.person_id = person_id
        self.person_name = person_name
        self.remarks = remarks

    def validate(self):
        """Recursively validate every nested document and image, skipping Nones."""
        for doc in self.identification_list or []:
            if doc:
                doc.validate()
        for img in self.image_list or []:
            if img:
                img.validate()

    def to_map(self):
        """Serialize this entry; both list keys are always present (possibly empty)."""
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        result['IdentificationList'] = [
            doc.to_map() if doc else None
            for doc in (self.identification_list or [])
        ]
        if self.identification_num is not None:
            result['IdentificationNum'] = self.identification_num
        result['ImageList'] = [
            img.to_map() if img else None
            for img in (self.image_list or [])
        ]
        if self.image_num is not None:
            result['ImageNum'] = self.image_num
        if self.person_code is not None:
            result['PersonCode'] = self.person_code
        if self.person_id is not None:
            result['PersonId'] = self.person_id
        if self.person_name is not None:
            result['PersonName'] = self.person_name
        if self.remarks is not None:
            result['Remarks'] = self.remarks
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        self.identification_list = [
            AddAiotPersonTableItemsForTablesRequestPersonTableItemListIdentificationList().from_map(entry)
            for entry in (m.get('IdentificationList') or [])
        ]
        if m.get('IdentificationNum') is not None:
            self.identification_num = m.get('IdentificationNum')
        self.image_list = [
            AddAiotPersonTableItemsForTablesRequestPersonTableItemListImageList().from_map(entry)
            for entry in (m.get('ImageList') or [])
        ]
        if m.get('ImageNum') is not None:
            self.image_num = m.get('ImageNum')
        if m.get('PersonCode') is not None:
            self.person_code = m.get('PersonCode')
        if m.get('PersonId') is not None:
            self.person_id = m.get('PersonId')
        if m.get('PersonName') is not None:
            self.person_name = m.get('PersonName')
        if m.get('Remarks') is not None:
            self.remarks = m.get('Remarks')
        return self
class AddAiotPersonTableItemsForTablesRequestPersonTableList(TeaModel):
    """Reference to one target person table: instance id plus table id."""

    def __init__(
        self,
        id: str = None,
        person_table_id: str = None,
    ):
        self.id = id
        self.person_table_id = person_table_id

    def validate(self):
        # No field-level constraints for this model.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        if self.id is not None:
            result['Id'] = self.id
        if self.person_table_id is not None:
            result['PersonTableId'] = self.person_table_id
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        instance_id = m.get('Id')
        if instance_id is not None:
            self.id = instance_id
        table_id = m.get('PersonTableId')
        if table_id is not None:
            self.person_table_id = table_id
        return self
class AddAiotPersonTableItemsForTablesRequest(TeaModel):
    """Request adding a batch of person entries to several person tables at once."""

    def __init__(
        self,
        person_table_item_list: List[AddAiotPersonTableItemsForTablesRequestPersonTableItemList] = None,
        person_table_list: List[AddAiotPersonTableItemsForTablesRequestPersonTableList] = None,
    ):
        self.person_table_item_list = person_table_item_list
        self.person_table_list = person_table_list

    def validate(self):
        """Recursively validate every person entry and table reference."""
        for item in self.person_table_item_list or []:
            if item:
                item.validate()
        for table in self.person_table_list or []:
            if table:
                table.validate()

    def to_map(self):
        """Serialize this request; both list keys are always present (possibly empty)."""
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        result['PersonTableItemList'] = [
            item.to_map() if item else None
            for item in (self.person_table_item_list or [])
        ]
        result['PersonTableList'] = [
            table.to_map() if table else None
            for table in (self.person_table_list or [])
        ]
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        self.person_table_item_list = [
            AddAiotPersonTableItemsForTablesRequestPersonTableItemList().from_map(entry)
            for entry in (m.get('PersonTableItemList') or [])
        ]
        self.person_table_list = [
            AddAiotPersonTableItemsForTablesRequestPersonTableList().from_map(entry)
            for entry in (m.get('PersonTableList') or [])
        ]
        return self
class AddAiotPersonTableItemsForTablesResponseBody(TeaModel):
    """Response payload of AddAiotPersonTableItemsForTables: status only."""

    # (python attribute, wire key) pairs, in serialization order.
    _FIELD_PAIRS = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        # No field-level constraints for this model.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        for attr, key in self._FIELD_PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_PAIRS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddAiotPersonTableItemsForTablesResponse(TeaModel):
    """HTTP-level wrapper for AddAiotPersonTableItemsForTables: headers,
    status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddAiotPersonTableItemsForTablesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = AddAiotPersonTableItemsForTablesResponseBody().from_map(m['body'])
        return self
class AddAiotVehicleTableItemsRequestVehicleTableItem(TeaModel):
    """One vehicle-table entry for AddAiotVehicleTableItems."""

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('begin_time', 'BeginTime'),
        ('end_time', 'EndTime'),
        ('owner_name', 'OwnerName'),
        ('phone_no', 'PhoneNo'),
        ('plate_no', 'PlateNo'),
        ('remarks', 'Remarks'),
        ('vehicle_table_item_id', 'VehicleTableItemId'),
    )

    def __init__(
        self,
        begin_time: str = None,
        end_time: str = None,
        owner_name: str = None,
        phone_no: str = None,
        plate_no: str = None,
        remarks: str = None,
        vehicle_table_item_id: str = None,
    ):
        self.begin_time = begin_time
        self.end_time = end_time
        self.owner_name = owner_name
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.remarks = remarks
        self.vehicle_table_item_id = vehicle_table_item_id

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddAiotVehicleTableItemsRequest(TeaModel):
    """Request model for AddAiotVehicleTableItems."""

    def __init__(
        self,
        id: str = None,
        vehicle_table_id: str = None,
        vehicle_table_item: AddAiotVehicleTableItemsRequestVehicleTableItem = None,
    ):
        self.id = id
        self.vehicle_table_id = vehicle_table_id
        # Nested item payload; validated recursively.
        self.vehicle_table_item = vehicle_table_item

    def validate(self):
        """Delegate validation to the nested table item, if present."""
        if self.vehicle_table_item:
            self.vehicle_table_item.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('id', 'Id'), ('vehicle_table_id', 'VehicleTableId')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        if self.vehicle_table_item is not None:
            result['VehicleTableItem'] = self.vehicle_table_item.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in (('id', 'Id'), ('vehicle_table_id', 'VehicleTableId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('VehicleTableItem') is not None:
            self.vehicle_table_item = AddAiotVehicleTableItemsRequestVehicleTableItem().from_map(m['VehicleTableItem'])
        return self
class AddAiotVehicleTableItemsShrinkRequest(TeaModel):
    """Shrink variant of AddAiotVehicleTableItemsRequest.

    The nested item travels as a single string under the same wire key
    'VehicleTableItem' — presumably a JSON-serialized form (Tea SDK
    'shrink' convention; confirm against the client code).
    """

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('id', 'Id'),
        ('vehicle_table_id', 'VehicleTableId'),
        ('vehicle_table_item_shrink', 'VehicleTableItem'),
    )

    def __init__(
        self,
        id: str = None,
        vehicle_table_id: str = None,
        vehicle_table_item_shrink: str = None,
    ):
        self.id = id
        self.vehicle_table_id = vehicle_table_id
        self.vehicle_table_item_shrink = vehicle_table_item_shrink

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddAiotVehicleTableItemsResponseBody(TeaModel):
    """Response body model for AddAiotVehicleTableItems."""

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddAiotVehicleTableItemsResponse(TeaModel):
    """Full response wrapper: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddAiotVehicleTableItemsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body also validates itself."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('body') is not None:
            self.body = AddAiotVehicleTableItemsResponseBody().from_map(m['body'])
        return self
class AddCameraForInstanceRequest(TeaModel):
    """Request model for AddCameraForInstance."""

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('camera_ids', 'CameraIds'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(
        self,
        camera_ids: List[str] = None,
        instance_id: str = None,
    ):
        # Camera (device) ids.
        self.camera_ids = camera_ids
        # Instance id.
        self.instance_id = instance_id

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddCameraForInstanceShrinkRequest(TeaModel):
    """Shrink variant of AddCameraForInstanceRequest.

    The camera id list travels as one string under the same wire key
    'CameraIds' — presumably JSON-serialized (Tea SDK 'shrink' convention).
    """

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('camera_ids_shrink', 'CameraIds'),
        ('instance_id', 'InstanceId'),
    )

    def __init__(
        self,
        camera_ids_shrink: str = None,
        instance_id: str = None,
    ):
        # Camera (device) ids, shrunken to a single string.
        self.camera_ids_shrink = camera_ids_shrink
        # Instance id.
        self.instance_id = instance_id

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddCameraForInstanceResponseBody(TeaModel):
    """Response body model for AddCameraForInstance."""

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        # Response code.
        self.code = code
        # Result message.
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddCameraForInstanceResponse(TeaModel):
    """Full response wrapper: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddCameraForInstanceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body also validates itself."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('body') is not None:
            self.body = AddCameraForInstanceResponseBody().from_map(m['body'])
        return self
class AddChannelRequest(TeaModel):
    """Request model for AddChannel."""

    def __init__(
        self,
        parent_device_gb_id: str = None,
    ):
        self.parent_device_gb_id = parent_device_gb_id

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.parent_device_gb_id is not None:
            result['ParentDeviceGbId'] = self.parent_device_gb_id
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        value = m.get('ParentDeviceGbId')
        if value is not None:
            self.parent_device_gb_id = value
        return self
class AddChannelResponseBody(TeaModel):
    """Response body model for AddChannel."""

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('code', 'Code'),
        ('data', 'Data'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddChannelResponse(TeaModel):
    """Full response wrapper: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddChannelResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body also validates itself."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('body') is not None:
            self.body = AddChannelResponseBody().from_map(m['body'])
        return self
class AddDataSourceRequest(TeaModel):
    """Request model for AddDataSource."""

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('corp_id', 'CorpId'),
        ('data_source_name', 'DataSourceName'),
        ('data_source_type', 'DataSourceType'),
        ('description', 'Description'),
        ('url', 'Url'),
    )

    def __init__(
        self,
        corp_id: str = None,
        data_source_name: str = None,
        data_source_type: str = None,
        description: str = None,
        url: str = None,
    ):
        self.corp_id = corp_id
        self.data_source_name = data_source_name
        self.data_source_type = data_source_type
        self.description = description
        self.url = url

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddDataSourceResponseBodyData(TeaModel):
    """Data payload of the AddDataSource response body."""

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('data_source_id', 'DataSourceId'),
        ('kafka_topic', 'KafkaTopic'),
        ('oss_path', 'OssPath'),
    )

    def __init__(
        self,
        data_source_id: str = None,
        kafka_topic: str = None,
        oss_path: str = None,
    ):
        self.data_source_id = data_source_id
        self.kafka_topic = kafka_topic
        self.oss_path = oss_path

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddDataSourceResponseBody(TeaModel):
    """Response body model for AddDataSource (carries a nested Data model)."""

    def __init__(
        self,
        code: str = None,
        data: AddDataSourceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        # Nested payload; validated recursively.
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Delegate validation to the nested data model, if present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        for attr, key in (('message', 'Message'), ('request_id', 'RequestId')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = AddDataSourceResponseBodyData().from_map(m['Data'])
        for attr, key in (('message', 'Message'), ('request_id', 'RequestId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddDataSourceResponse(TeaModel):
    """Full response wrapper: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddDataSourceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body also validates itself."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('body') is not None:
            self.body = AddDataSourceResponseBody().from_map(m['body'])
        return self
class AddDeviceRequest(TeaModel):
    """Request model for AddDevice."""

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('bit_rate', 'BitRate'),
        ('corp_id', 'CorpId'),
        ('device_address', 'DeviceAddress'),
        ('device_direction', 'DeviceDirection'),
        ('device_name', 'DeviceName'),
        ('device_resolution', 'DeviceResolution'),
        ('device_site', 'DeviceSite'),
        ('device_type', 'DeviceType'),
        ('gb_id', 'GbId'),
        ('vendor', 'Vendor'),
    )

    def __init__(
        self,
        bit_rate: str = None,
        corp_id: str = None,
        device_address: str = None,
        device_direction: str = None,
        device_name: str = None,
        device_resolution: str = None,
        device_site: str = None,
        device_type: str = None,
        gb_id: str = None,
        vendor: str = None,
    ):
        self.bit_rate = bit_rate
        self.corp_id = corp_id
        self.device_address = device_address
        self.device_direction = device_direction
        self.device_name = device_name
        self.device_resolution = device_resolution
        self.device_site = device_site
        self.device_type = device_type
        self.gb_id = gb_id
        self.vendor = vendor

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddDeviceResponseBody(TeaModel):
    """Response body model for AddDevice."""

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('code', 'Code'),
        ('data', 'Data'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddDeviceResponse(TeaModel):
    """Full response wrapper: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body also validates itself."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('body') is not None:
            self.body = AddDeviceResponseBody().from_map(m['body'])
        return self
class AddDoubleVerificationGroupsRequestDoubleVerificationGroupListPersonIdList(TeaModel):
    """Person reference (person id + owning table id) inside a verification group."""

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('person_id', 'PersonId'),
        ('person_table_id', 'PersonTableId'),
    )

    def __init__(
        self,
        person_id: str = None,
        person_table_id: str = None,
    ):
        self.person_id = person_id
        self.person_table_id = person_table_id

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddDoubleVerificationGroupsRequestDoubleVerificationGroupList(TeaModel):
    """One verification group with its member person references."""

    # Scalar (python attribute, wire key) pairs; PersonIdList is handled separately.
    _SCALARS = (
        ('group_id', 'GroupId'),
        ('interval', 'Interval'),
        ('member_number', 'MemberNumber'),
    )

    def __init__(
        self,
        group_id: str = None,
        interval: int = None,
        member_number: int = None,
        person_id_list: List[AddDoubleVerificationGroupsRequestDoubleVerificationGroupListPersonIdList] = None,
    ):
        self.group_id = group_id
        self.interval = interval
        self.member_number = member_number
        self.person_id_list = person_id_list

    def validate(self):
        """Validate each nested person-id entry, if any."""
        for entry in self.person_id_list or ():
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._SCALARS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        # The list key is always emitted, even when empty/unset.
        result['PersonIdList'] = []
        if self.person_id_list is not None:
            result['PersonIdList'] = [
                entry.to_map() if entry else None for entry in self.person_id_list
            ]
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._SCALARS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.person_id_list = []
        if m.get('PersonIdList') is not None:
            self.person_id_list = [
                AddDoubleVerificationGroupsRequestDoubleVerificationGroupListPersonIdList().from_map(entry)
                for entry in m.get('PersonIdList')
            ]
        return self
class AddDoubleVerificationGroupsRequest(TeaModel):
    """Request model for AddDoubleVerificationGroups."""

    def __init__(
        self,
        double_verification_group_list: List[AddDoubleVerificationGroupsRequestDoubleVerificationGroupList] = None,
        id: str = None,
    ):
        self.double_verification_group_list = double_verification_group_list
        self.id = id

    def validate(self):
        """Validate each nested group entry, if any."""
        for entry in self.double_verification_group_list or ():
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # The list key is always emitted, even when empty/unset.
        result['DoubleVerificationGroupList'] = []
        if self.double_verification_group_list is not None:
            result['DoubleVerificationGroupList'] = [
                entry.to_map() if entry else None
                for entry in self.double_verification_group_list
            ]
        if self.id is not None:
            result['Id'] = self.id
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        self.double_verification_group_list = []
        if m.get('DoubleVerificationGroupList') is not None:
            self.double_verification_group_list = [
                AddDoubleVerificationGroupsRequestDoubleVerificationGroupList().from_map(entry)
                for entry in m.get('DoubleVerificationGroupList')
            ]
        if m.get('Id') is not None:
            self.id = m.get('Id')
        return self
class AddDoubleVerificationGroupsResponseBodyResultList(TeaModel):
    """Per-group result entry in the AddDoubleVerificationGroups response."""

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('code', 'Code'),
        ('group_id', 'GroupId'),
        ('message', 'Message'),
    )

    def __init__(
        self,
        code: str = None,
        group_id: str = None,
        message: str = None,
    ):
        self.code = code
        self.group_id = group_id
        self.message = message

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddDoubleVerificationGroupsResponseBody(TeaModel):
    """Response body model for AddDoubleVerificationGroups."""

    # Scalar (python attribute, wire key) pairs; ResultList is handled separately.
    _SCALARS = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        result_list: List[AddDoubleVerificationGroupsResponseBodyResultList] = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.result_list = result_list

    def validate(self):
        """Validate each nested result entry, if any."""
        for entry in self.result_list or ():
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._SCALARS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        # The list key is always emitted, even when empty/unset.
        result['ResultList'] = []
        if self.result_list is not None:
            result['ResultList'] = [
                entry.to_map() if entry else None for entry in self.result_list
            ]
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._SCALARS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.result_list = []
        if m.get('ResultList') is not None:
            self.result_list = [
                AddDoubleVerificationGroupsResponseBodyResultList().from_map(entry)
                for entry in m.get('ResultList')
            ]
        return self
class AddDoubleVerificationGroupsResponse(TeaModel):
    """Full response wrapper: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddDoubleVerificationGroupsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body also validates itself."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('body') is not None:
            self.body = AddDoubleVerificationGroupsResponseBody().from_map(m['body'])
        return self
class AddMonitorRequest(TeaModel):
    """Request model for AddMonitor."""

    # (python attribute, wire key) pairs driving to_map/from_map below.
    _FIELDS = (
        ('algorithm_vendor', 'AlgorithmVendor'),
        ('batch_indicator', 'BatchIndicator'),
        ('corp_id', 'CorpId'),
        ('description', 'Description'),
        ('monitor_type', 'MonitorType'),
        ('notifier_app_secret', 'NotifierAppSecret'),
        ('notifier_extend_values', 'NotifierExtendValues'),
        ('notifier_time_out', 'NotifierTimeOut'),
        ('notifier_type', 'NotifierType'),
        ('notifier_url', 'NotifierUrl'),
    )

    def __init__(
        self,
        algorithm_vendor: str = None,
        batch_indicator: int = None,
        corp_id: str = None,
        description: str = None,
        monitor_type: str = None,
        notifier_app_secret: str = None,
        notifier_extend_values: str = None,
        notifier_time_out: int = None,
        notifier_type: str = None,
        notifier_url: str = None,
    ):
        self.algorithm_vendor = algorithm_vendor
        self.batch_indicator = batch_indicator
        self.corp_id = corp_id
        self.description = description
        self.monitor_type = monitor_type
        self.notifier_app_secret = notifier_app_secret
        self.notifier_extend_values = notifier_extend_values
        self.notifier_time_out = notifier_time_out
        self.notifier_type = notifier_type
        self.notifier_url = notifier_url

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddMonitorResponseBodyData(TeaModel):
    """Data payload of the AddMonitor response body (the created task id)."""

    def __init__(
        self,
        task_id: str = None,
    ):
        self.task_id = task_id

    def validate(self):
        """No constraints to check on this model."""

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.task_id is not None:
            result['TaskId'] = self.task_id
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        value = m.get('TaskId')
        if value is not None:
            self.task_id = value
        return self
class AddMonitorResponseBody(TeaModel):
    """Response body model for AddMonitor (carries a nested Data model)."""

    def __init__(
        self,
        code: str = None,
        data: AddMonitorResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        # Nested payload; validated recursively.
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Delegate validation to the nested data model, if present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        for attr, key in (('message', 'Message'), ('request_id', 'RequestId')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = AddMonitorResponseBodyData().from_map(m['Data'])
        for attr, key in (('message', 'Message'), ('request_id', 'RequestId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class AddMonitorResponse(TeaModel):
    """Full response wrapper: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddMonitorResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body also validates itself."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize every set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in (('headers', 'headers'), ('status_code', 'statusCode')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('body') is not None:
            self.body = AddMonitorResponseBody().from_map(m['body'])
        return self
class AddProfileRequest(TeaModel):
    """Request model for the AddProfile API: flat profile attributes keyed by wire names."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('BizId', 'biz_id'),
        ('CatalogId', 'catalog_id'),
        ('CorpId', 'corp_id'),
        ('FaceUrl', 'face_url'),
        ('Gender', 'gender'),
        ('IdNumber', 'id_number'),
        ('IsvSubId', 'isv_sub_id'),
        ('LiveAddress', 'live_address'),
        ('Name', 'name'),
        ('PhoneNo', 'phone_no'),
        ('PlateNo', 'plate_no'),
        ('SceneType', 'scene_type'),
    )

    def __init__(
        self,
        biz_id: str = None,
        catalog_id: int = None,
        corp_id: str = None,
        face_url: str = None,
        gender: int = None,
        id_number: str = None,
        isv_sub_id: str = None,
        live_address: str = None,
        name: str = None,
        phone_no: str = None,
        plate_no: str = None,
        scene_type: str = None,
    ):
        self.biz_id = biz_id
        self.catalog_id = catalog_id
        self.corp_id = corp_id
        self.face_url = face_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.live_address = live_address
        self.name = name
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.scene_type = scene_type

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddProfileResponseBodyData(TeaModel):
    """Data payload of the AddProfile response: the stored profile's attributes."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('BizId', 'biz_id'),
        ('CatalogId', 'catalog_id'),
        ('FaceUrl', 'face_url'),
        ('Gender', 'gender'),
        ('IdNumber', 'id_number'),
        ('IsvSubId', 'isv_sub_id'),
        ('LiveAddress', 'live_address'),
        ('Name', 'name'),
        ('PhoneNo', 'phone_no'),
        ('PlateNo', 'plate_no'),
        ('ProfileId', 'profile_id'),
        ('SceneType', 'scene_type'),
    )

    def __init__(
        self,
        biz_id: str = None,
        catalog_id: int = None,
        face_url: str = None,
        gender: str = None,
        id_number: str = None,
        isv_sub_id: str = None,
        live_address: str = None,
        name: str = None,
        phone_no: str = None,
        plate_no: str = None,
        profile_id: int = None,
        scene_type: str = None,
    ):
        self.biz_id = biz_id
        self.catalog_id = catalog_id
        self.face_url = face_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.live_address = live_address
        self.name = name
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.profile_id = profile_id
        self.scene_type = scene_type

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddProfileResponseBody(TeaModel):
    """Envelope of the AddProfile response: Code/Message/RequestId plus a Data payload."""

    def __init__(
        self,
        code: str = None,
        data: AddProfileResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Delegate validation to the nested Data model when present."""
        payload = self.data
        if payload:
            payload.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = AddProfileResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class AddProfileResponse(TeaModel):
    """Full AddProfile API response: transport headers, HTTP status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddProfileResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three transport-level parts are mandatory; the body also self-validates."""
        for value, label in ((self.headers, 'headers'),
                             (self.status_code, 'status_code'),
                             (self.body, 'body')):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict with transport-level keys, omitting unset fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict; the body is parsed into its model type. Returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = AddProfileResponseBody().from_map(m['body'])
        return self
class AddProfileCatalogRequest(TeaModel):
    """Request model for the AddProfileCatalog API."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('CatalogName', 'catalog_name'),
        ('CorpId', 'corp_id'),
        ('IsvSubId', 'isv_sub_id'),
        ('ParentCatalogId', 'parent_catalog_id'),
    )

    def __init__(
        self,
        catalog_name: str = None,
        corp_id: str = None,
        isv_sub_id: str = None,
        parent_catalog_id: int = None,
    ):
        self.catalog_name = catalog_name
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id
        self.parent_catalog_id = parent_catalog_id

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddProfileCatalogResponseBodyData(TeaModel):
    """Data payload of the AddProfileCatalog response: the created catalog's identifiers."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('CatalogId', 'catalog_id'),
        ('CatalogName', 'catalog_name'),
        ('IsvSubId', 'isv_sub_id'),
    )

    def __init__(
        self,
        catalog_id: int = None,
        catalog_name: str = None,
        isv_sub_id: str = None,
    ):
        self.catalog_id = catalog_id
        self.catalog_name = catalog_name
        self.isv_sub_id = isv_sub_id

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddProfileCatalogResponseBody(TeaModel):
    """Envelope of the AddProfileCatalog response: Code/Message/RequestId plus a Data payload."""

    def __init__(
        self,
        code: str = None,
        data: AddProfileCatalogResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Delegate validation to the nested Data model when present."""
        payload = self.data
        if payload:
            payload.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = AddProfileCatalogResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class AddProfileCatalogResponse(TeaModel):
    """Full AddProfileCatalog API response: transport headers, HTTP status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddProfileCatalogResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three transport-level parts are mandatory; the body also self-validates."""
        for value, label in ((self.headers, 'headers'),
                             (self.status_code, 'status_code'),
                             (self.body, 'body')):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict with transport-level keys, omitting unset fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict; the body is parsed into its model type. Returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = AddProfileCatalogResponseBody().from_map(m['body'])
        return self
class AddSearchItemsRequestSearchItemList(TeaModel):
    """One entry of the AddSearchItems request batch: an item id, name and image URL."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('ItemImageUrl', 'item_image_url'),
        ('SearchItemId', 'search_item_id'),
        ('SearchItemName', 'search_item_name'),
    )

    def __init__(
        self,
        item_image_url: str = None,
        search_item_id: str = None,
        search_item_name: str = None,
    ):
        self.item_image_url = item_image_url
        self.search_item_id = search_item_id
        self.search_item_name = search_item_name

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddSearchItemsRequest(TeaModel):
    """Request model for AddSearchItems: a batch of items plus the target table id."""

    def __init__(
        self,
        search_item_list: List[AddSearchItemsRequestSearchItemList] = None,
        search_table_id: str = None,
    ):
        self.search_item_list = search_item_list
        self.search_table_id = search_table_id

    def validate(self):
        """Validate each batch entry, skipping None entries."""
        for item in self.search_item_list or []:
            if item:
                item.validate()

    def to_map(self):
        """Serialize to a wire dict; 'SearchItemList' is always emitted (possibly empty)."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        out['SearchItemList'] = [
            item.to_map() if item else None
            for item in (self.search_item_list or [])
        ]
        if self.search_table_id is not None:
            out['SearchTableId'] = self.search_table_id
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire dict; the item list is rebuilt from scratch. Returns self."""
        m = m or {}
        self.search_item_list = [
            AddSearchItemsRequestSearchItemList().from_map(item)
            for item in (m.get('SearchItemList') or [])
        ]
        if m.get('SearchTableId') is not None:
            self.search_table_id = m.get('SearchTableId')
        return self
class AddSearchItemsShrinkRequest(TeaModel):
    """Shrink variant of AddSearchItemsRequest: the item list is a pre-serialized string
    carried under the same 'SearchItemList' wire key."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('SearchItemList', 'search_item_list_shrink'),
        ('SearchTableId', 'search_table_id'),
    )

    def __init__(
        self,
        search_item_list_shrink: str = None,
        search_table_id: str = None,
    ):
        self.search_item_list_shrink = search_item_list_shrink
        self.search_table_id = search_table_id

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddSearchItemsResponseBodyData(TeaModel):
    """Data payload of the AddSearchItems response: ids of added and failed items."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('AddedItemIds', 'added_item_ids'),
        ('FailedItemIds', 'failed_item_ids'),
    )

    def __init__(
        self,
        added_item_ids: str = None,
        failed_item_ids: str = None,
    ):
        self.added_item_ids = added_item_ids
        self.failed_item_ids = failed_item_ids

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddSearchItemsResponseBody(TeaModel):
    """Envelope of the AddSearchItems response: Code/Message/RequestId/Success plus a Data payload."""

    def __init__(
        self,
        code: str = None,
        data: AddSearchItemsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.success = success

    def validate(self):
        """Delegate validation to the nested Data model when present."""
        payload = self.data
        if payload:
            payload.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        if self.success is not None:
            out['Success'] = self.success
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = AddSearchItemsResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Success') is not None:
            self.success = m.get('Success')
        return self
class AddSearchItemsResponse(TeaModel):
    """Full AddSearchItems API response: transport headers, HTTP status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddSearchItemsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three transport-level parts are mandatory; the body also self-validates."""
        for value, label in ((self.headers, 'headers'),
                             (self.status_code, 'status_code'),
                             (self.body, 'body')):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict with transport-level keys, omitting unset fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict; the body is parsed into its model type. Returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = AddSearchItemsResponseBody().from_map(m['body'])
        return self
class AddWatchItemsRequestWatchItemList(TeaModel):
    """One entry of the AddWatchItems request batch: id, name, image URL and attributes."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('ItemAttributes', 'item_attributes'),
        ('ItemImageUrl', 'item_image_url'),
        ('WatchItemId', 'watch_item_id'),
        ('WatchItemName', 'watch_item_name'),
    )

    def __init__(
        self,
        item_attributes: str = None,
        item_image_url: str = None,
        watch_item_id: str = None,
        watch_item_name: str = None,
    ):
        self.item_attributes = item_attributes
        self.item_image_url = item_image_url
        self.watch_item_id = watch_item_id
        self.watch_item_name = watch_item_name

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddWatchItemsRequest(TeaModel):
    """Request model for AddWatchItems: a batch of watch items plus the target policy id."""

    def __init__(
        self,
        watch_item_list: List[AddWatchItemsRequestWatchItemList] = None,
        watch_policy_id: str = None,
    ):
        self.watch_item_list = watch_item_list
        self.watch_policy_id = watch_policy_id

    def validate(self):
        """Validate each batch entry, skipping None entries."""
        for item in self.watch_item_list or []:
            if item:
                item.validate()

    def to_map(self):
        """Serialize to a wire dict; 'WatchItemList' is always emitted (possibly empty)."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        out['WatchItemList'] = [
            item.to_map() if item else None
            for item in (self.watch_item_list or [])
        ]
        if self.watch_policy_id is not None:
            out['WatchPolicyId'] = self.watch_policy_id
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire dict; the item list is rebuilt from scratch. Returns self."""
        m = m or {}
        self.watch_item_list = [
            AddWatchItemsRequestWatchItemList().from_map(item)
            for item in (m.get('WatchItemList') or [])
        ]
        if m.get('WatchPolicyId') is not None:
            self.watch_policy_id = m.get('WatchPolicyId')
        return self
class AddWatchItemsResponseBodyData(TeaModel):
    """Data payload of the AddWatchItems response: ids of added and failed items."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('AddedItemIds', 'added_item_ids'),
        ('FailedItemIds', 'failed_item_ids'),
    )

    def __init__(
        self,
        added_item_ids: str = None,
        failed_item_ids: str = None,
    ):
        self.added_item_ids = added_item_ids
        self.failed_item_ids = failed_item_ids

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class AddWatchItemsResponseBody(TeaModel):
    """Envelope of the AddWatchItems response: Code/Message/RequestId/Success plus a Data payload."""

    def __init__(
        self,
        code: str = None,
        data: AddWatchItemsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.success = success

    def validate(self):
        """Delegate validation to the nested Data model when present."""
        payload = self.data
        if payload:
            payload.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        if self.success is not None:
            out['Success'] = self.success
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = AddWatchItemsResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Success') is not None:
            self.success = m.get('Success')
        return self
class AddWatchItemsResponse(TeaModel):
    """Full AddWatchItems API response: transport headers, HTTP status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: AddWatchItemsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three transport-level parts are mandatory; the body also self-validates."""
        for value, label in ((self.headers, 'headers'),
                             (self.status_code, 'status_code'),
                             (self.body, 'body')):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict with transport-level keys, omitting unset fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict; the body is parsed into its model type. Returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = AddWatchItemsResponseBody().from_map(m['body'])
        return self
class BatchDeleteInstanceRequest(TeaModel):
    """Request model for BatchDeleteInstance: instance ids to delete and the owning project."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('InstanceIds', 'instance_ids'),
        ('ProjectId', 'project_id'),
    )

    def __init__(
        self,
        instance_ids: List[str] = None,
        project_id: str = None,
    ):
        self.instance_ids = instance_ids
        self.project_id = project_id

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BatchDeleteInstanceShrinkRequest(TeaModel):
    """Shrink variant of BatchDeleteInstanceRequest: the id list is a pre-serialized
    string carried under the same 'InstanceIds' wire key."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('InstanceIds', 'instance_ids_shrink'),
        ('ProjectId', 'project_id'),
    )

    def __init__(
        self,
        instance_ids_shrink: str = None,
        project_id: str = None,
    ):
        self.instance_ids_shrink = instance_ids_shrink
        self.project_id = project_id

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BatchDeleteInstanceResponseBody(TeaModel):
    """Body of the BatchDeleteInstance response: plain Code/Message/RequestId/Success envelope."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('Code', 'code'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
        ('Success', 'success'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.message = message
        self.request_id = request_id
        self.success = success

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BatchDeleteInstanceResponse(TeaModel):
    """Full BatchDeleteInstance API response: transport headers, HTTP status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: BatchDeleteInstanceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three transport-level parts are mandatory; the body also self-validates."""
        for value, label in ((self.headers, 'headers'),
                             (self.status_code, 'status_code'),
                             (self.body, 'body')):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict with transport-level keys, omitting unset fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict; the body is parsed into its model type. Returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = BatchDeleteInstanceResponseBody().from_map(m['body'])
        return self
class BindCorpGroupRequest(TeaModel):
    """Request model for BindCorpGroup: the corp and corp-group ids to bind."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('CorpGroupId', 'corp_group_id'),
        ('CorpId', 'corp_id'),
    )

    def __init__(
        self,
        corp_group_id: str = None,
        corp_id: str = None,
    ):
        self.corp_group_id = corp_group_id
        self.corp_id = corp_id

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BindCorpGroupResponseBody(TeaModel):
    """Body of the BindCorpGroup response: plain Code/Message/RequestId/Success envelope."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('Code', 'code'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
        ('Success', 'success'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.message = message
        self.request_id = request_id
        self.success = success

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BindCorpGroupResponse(TeaModel):
    """Full BindCorpGroup API response: transport headers, HTTP status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: BindCorpGroupResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three transport-level parts are mandatory; the body also self-validates."""
        for value, label in ((self.headers, 'headers'),
                             (self.status_code, 'status_code'),
                             (self.body, 'body')):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict with transport-level keys, omitting unset fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict; the body is parsed into its model type. Returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = BindCorpGroupResponseBody().from_map(m['body'])
        return self
class BindDevicesRequestSubDeviceList(TeaModel):
    """One sub-device entry of the BindDevices request: connection endpoint and credentials.
    Note the 'PIN' wire key maps to the lowercase `pin` attribute."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('AccessProtocol', 'access_protocol'),
        ('LoginName', 'login_name'),
        ('PIN', 'pin'),
        ('SubDeviceIp', 'sub_device_ip'),
        ('SubDeviceName', 'sub_device_name'),
        ('SubDevicePort', 'sub_device_port'),
    )

    def __init__(
        self,
        access_protocol: str = None,
        login_name: str = None,
        pin: str = None,
        sub_device_ip: str = None,
        sub_device_name: str = None,
        sub_device_port: str = None,
    ):
        self.access_protocol = access_protocol
        self.login_name = login_name
        self.pin = pin
        self.sub_device_ip = sub_device_ip
        self.sub_device_name = sub_device_name
        self.sub_device_port = sub_device_port

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BindDevicesRequest(TeaModel):
    """Request model for BindDevices: parent device/corp plus a batch of sub-devices."""

    def __init__(
        self,
        corp_id: str = None,
        device_id: str = None,
        sub_device_list: List[BindDevicesRequestSubDeviceList] = None,
    ):
        self.corp_id = corp_id
        self.device_id = device_id
        self.sub_device_list = sub_device_list

    def validate(self):
        """Validate each sub-device entry, skipping None entries."""
        for item in self.sub_device_list or []:
            if item:
                item.validate()

    def to_map(self):
        """Serialize to a wire dict; 'SubDeviceList' is always emitted (possibly empty)."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.corp_id is not None:
            out['CorpId'] = self.corp_id
        if self.device_id is not None:
            out['DeviceId'] = self.device_id
        out['SubDeviceList'] = [
            item.to_map() if item else None
            for item in (self.sub_device_list or [])
        ]
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire dict; the sub-device list is rebuilt from scratch. Returns self."""
        m = m or {}
        if m.get('CorpId') is not None:
            self.corp_id = m.get('CorpId')
        if m.get('DeviceId') is not None:
            self.device_id = m.get('DeviceId')
        self.sub_device_list = [
            BindDevicesRequestSubDeviceList().from_map(item)
            for item in (m.get('SubDeviceList') or [])
        ]
        return self
class BindDevicesShrinkRequest(TeaModel):
    """Shrink variant of BindDevicesRequest: the sub-device list is a pre-serialized
    string carried under the same 'SubDeviceList' wire key."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('CorpId', 'corp_id'),
        ('DeviceId', 'device_id'),
        ('SubDeviceList', 'sub_device_list_shrink'),
    )

    def __init__(
        self,
        corp_id: str = None,
        device_id: str = None,
        sub_device_list_shrink: str = None,
    ):
        self.corp_id = corp_id
        self.device_id = device_id
        self.sub_device_list_shrink = sub_device_list_shrink

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BindDevicesResponseBodyDataSubDeviceList(TeaModel):
    """Per-sub-device result entry of the BindDevices response: outcome code/message
    plus the sub-device's identifiers."""

    # (wire key, attribute name) pairs driving to_map/from_map, in wire order.
    _FIELDS = (
        ('Code', 'code'),
        ('Message', 'message'),
        ('SubDeviceId', 'sub_device_id'),
        ('SubDeviceIp', 'sub_device_ip'),
        ('SubDeviceName', 'sub_device_name'),
        ('SubDeviceNum', 'sub_device_num'),
        ('SubDevicePort', 'sub_device_port'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        sub_device_id: str = None,
        sub_device_ip: str = None,
        sub_device_name: str = None,
        sub_device_num: str = None,
        sub_device_port: str = None,
    ):
        self.code = code
        self.message = message
        self.sub_device_id = sub_device_id
        self.sub_device_ip = sub_device_ip
        self.sub_device_name = sub_device_name
        self.sub_device_num = sub_device_num
        self.sub_device_port = sub_device_port

    def validate(self):
        """No field-level constraints to enforce."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in self._FIELDS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BindDevicesResponseBodyData(TeaModel):
    """BindDevices response payload: list of per-sub-device binding results."""

    def __init__(
        self,
        sub_device_list: List[BindDevicesResponseBodyDataSubDeviceList] = None,
    ):
        self.sub_device_list = sub_device_list

    def validate(self):
        for item in self.sub_device_list or ():
            if item:
                item.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        # 'SubDeviceList' is always emitted, even when the attribute is unset.
        result = {'SubDeviceList': []}
        if self.sub_device_list is not None:
            result['SubDeviceList'] = [
                item.to_map() if item else None for item in self.sub_device_list
            ]
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        self.sub_device_list = []
        if m.get('SubDeviceList') is not None:
            self.sub_device_list = [
                BindDevicesResponseBodyDataSubDeviceList().from_map(item)
                for item in m.get('SubDeviceList')
            ]
        return self
class BindDevicesResponseBody(TeaModel):
    """Top-level body of the BindDevices response (code/message plus nested data)."""

    def __init__(
        self,
        code: str = None,
        data: BindDevicesResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # ID of the request.
        self.request_id = request_id

    def validate(self):
        data = self.data
        if data:
            data.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for key, value in (
            ('Code', self.code),
            ('Data', self.data),
            ('Message', self.message),
            ('RequestId', self.request_id),
        ):
            if value is not None:
                # The nested model is flattened via its own to_map().
                result[key] = value.to_map() if key == 'Data' else value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        if m.get('Data') is not None:
            self.data = BindDevicesResponseBodyData().from_map(m['Data'])
        return self
class BindDevicesResponse(TeaModel):
    """HTTP-level wrapper for BindDevices: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: BindDevicesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three members are mandatory on a materialized response.
        for value, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status = m.get('statusCode')
        if status is not None:
            self.status_code = status
        if m.get('body') is not None:
            self.body = BindDevicesResponseBody().from_map(m['body'])
        return self
class BindPersonRequest(TeaModel):
    """Request model for the BindPerson API operation."""

    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
        person_id: str = None,
        person_matching_rate: str = None,
        profile_id: int = None,
    ):
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id
        self.person_id = person_id
        self.person_matching_rate = person_matching_rate
        self.profile_id = profile_id

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        pairs = (
            ('CorpId', self.corp_id),
            ('IsvSubId', self.isv_sub_id),
            ('PersonId', self.person_id),
            ('PersonMatchingRate', self.person_matching_rate),
            ('ProfileId', self.profile_id),
        )
        # Serialize only attributes that were actually set.
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('CorpId', 'corp_id'),
            ('IsvSubId', 'isv_sub_id'),
            ('PersonId', 'person_id'),
            ('PersonMatchingRate', 'person_matching_rate'),
            ('ProfileId', 'profile_id'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BindPersonResponseBody(TeaModel):
    """Body of the BindPerson response; Data is a bare boolean result."""

    def __init__(
        self,
        code: str = None,
        data: bool = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        pairs = (
            ('Code', self.code),
            ('Data', self.data),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        # Serialize only attributes that were actually set.
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Code', 'code'),
            ('Data', 'data'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BindPersonResponse(TeaModel):
    """HTTP-level wrapper for BindPerson: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: BindPersonResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three members are mandatory on a materialized response.
        for value, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status = m.get('statusCode')
        if status is not None:
            self.status_code = status
        if m.get('body') is not None:
            self.body = BindPersonResponseBody().from_map(m['body'])
        return self
class BindUserRequest(TeaModel):
    """Request model for the BindUser API operation."""

    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
        matching_rate: str = None,
        person_id: str = None,
        user_id: int = None,
    ):
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id
        self.matching_rate = matching_rate
        self.person_id = person_id
        self.user_id = user_id

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        pairs = (
            ('CorpId', self.corp_id),
            ('IsvSubId', self.isv_sub_id),
            ('MatchingRate', self.matching_rate),
            ('PersonId', self.person_id),
            ('UserId', self.user_id),
        )
        # Serialize only attributes that were actually set.
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('CorpId', 'corp_id'),
            ('IsvSubId', 'isv_sub_id'),
            ('MatchingRate', 'matching_rate'),
            ('PersonId', 'person_id'),
            ('UserId', 'user_id'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BindUserResponseBody(TeaModel):
    """Body of the BindUser response; Data is a bare boolean result."""

    def __init__(
        self,
        code: str = None,
        data: bool = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        pairs = (
            ('Code', self.code),
            ('Data', self.data),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        # Serialize only attributes that were actually set.
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Code', 'code'),
            ('Data', 'data'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class BindUserResponse(TeaModel):
    """HTTP-level wrapper for BindUser: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: BindUserResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three members are mandatory on a materialized response.
        for value, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status = m.get('statusCode')
        if status is not None:
            self.status_code = status
        if m.get('body') is not None:
            self.body = BindUserResponseBody().from_map(m['body'])
        return self
class CheckAIInstanceNameRequest(TeaModel):
    """Request model for CheckAIInstanceName: checks an instance-name candidate."""

    def __init__(
        self,
        instance_name: str = None,
        instance_type: str = None,
        project_id: str = None,
    ):
        # Instance name.
        self.instance_name = instance_name
        # Instance type.
        self.instance_type = instance_type
        # ID of the owning project.
        self.project_id = project_id

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        pairs = (
            ('InstanceName', self.instance_name),
            ('InstanceType', self.instance_type),
            ('ProjectId', self.project_id),
        )
        # Serialize only attributes that were actually set.
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('InstanceName', 'instance_name'),
            ('InstanceType', 'instance_type'),
            ('ProjectId', 'project_id'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CheckAIInstanceNameResponseBodyData(TeaModel):
    """Payload of CheckAIInstanceName: whether the candidate name is available."""

    def __init__(
        self,
        available: bool = None,
    ):
        self.available = available

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        # Serialize only when the attribute was actually set.
        if self.available is not None:
            return {'Available': self.available}
        return {}

    def from_map(self, m: dict = None):
        m = m or {}
        value = m.get('Available')
        if value is not None:
            self.available = value
        return self
class CheckAIInstanceNameResponseBody(TeaModel):
    """Top-level body of the CheckAIInstanceName response."""

    def __init__(
        self,
        code: str = None,
        data: CheckAIInstanceNameResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        # Response code.
        self.code = code
        # Response payload.
        self.data = data
        # Error message.
        self.message = message
        # Request ID.
        self.request_id = request_id

    def validate(self):
        data = self.data
        if data:
            data.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for key, value in (
            ('Code', self.code),
            ('Data', self.data),
            ('Message', self.message),
            ('RequestId', self.request_id),
        ):
            if value is not None:
                # The nested model is flattened via its own to_map().
                result[key] = value.to_map() if key == 'Data' else value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        if m.get('Data') is not None:
            self.data = CheckAIInstanceNameResponseBodyData().from_map(m['Data'])
        return self
class CheckAIInstanceNameResponse(TeaModel):
    """HTTP-level wrapper for CheckAIInstanceName: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CheckAIInstanceNameResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three members are mandatory on a materialized response.
        for value, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status = m.get('statusCode')
        if status is not None:
            self.status_code = status
        if m.get('body') is not None:
            self.body = CheckAIInstanceNameResponseBody().from_map(m['body'])
        return self
class CheckSLRRequest(TeaModel):
    """Request model for the CheckSLR API operation (single ServiceName field)."""

    def __init__(
        self,
        service_name: str = None,
    ):
        self.service_name = service_name

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        # Serialize only when the attribute was actually set.
        if self.service_name is not None:
            return {'ServiceName': self.service_name}
        return {}

    def from_map(self, m: dict = None):
        m = m or {}
        value = m.get('ServiceName')
        if value is not None:
            self.service_name = value
        return self
class CheckSLRResponseBody(TeaModel):
    """Body of the CheckSLR response."""

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # ID of the request.
        self.request_id = request_id
        self.success = success

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        pairs = (
            ('Code', self.code),
            ('Data', self.data),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        )
        # Serialize only attributes that were actually set.
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Code', 'code'),
            ('Data', 'data'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
            ('Success', 'success'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CheckSLRResponse(TeaModel):
    """HTTP-level wrapper for CheckSLR: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CheckSLRResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three members are mandatory on a materialized response.
        for value, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status = m.get('statusCode')
        if status is not None:
            self.status_code = status
        if m.get('body') is not None:
            self.body = CheckSLRResponseBody().from_map(m['body'])
        return self
class ControlAiotDeviceRequestMiFareCard(TeaModel):
    """MiFare-card sub-structure of the ControlAiotDevice request."""

    def __init__(
        self,
        area_code: int = None,
        area_deviate: int = None,
        area_len: int = None,
        enabled: str = None,
        key_type: int = None,
        last_change: str = None,
        secret_key: str = None,
    ):
        self.area_code = area_code
        self.area_deviate = area_deviate
        self.area_len = area_len
        self.enabled = enabled
        self.key_type = key_type
        self.last_change = last_change
        self.secret_key = secret_key

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        pairs = (
            ('AreaCode', self.area_code),
            ('AreaDeviate', self.area_deviate),
            ('AreaLen', self.area_len),
            ('Enabled', self.enabled),
            ('KeyType', self.key_type),
            ('LastChange', self.last_change),
            ('SecretKey', self.secret_key),
        )
        # Serialize only attributes that were actually set.
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('AreaCode', 'area_code'),
            ('AreaDeviate', 'area_deviate'),
            ('AreaLen', 'area_len'),
            ('Enabled', 'enabled'),
            ('KeyType', 'key_type'),
            ('LastChange', 'last_change'),
            ('SecretKey', 'secret_key'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ControlAiotDeviceRequest(TeaModel):
    """Request model for ControlAiotDevice; MiFareCard is the only nested model field."""

    def __init__(
        self,
        associated_device_id: str = None,
        associated_ipaddr: str = None,
        associated_port: int = None,
        associated_verification_enable: str = None,
        barrier_command: int = None,
        check_enabled: str = None,
        command_type: int = None,
        double_verification_group_enabled: str = None,
        gate_ctl_status: int = None,
        id: str = None,
        identity_number: str = None,
        is_proxy: bool = None,
        mi_fare_card: ControlAiotDeviceRequestMiFareCard = None,
        name: str = None,
        reboot_device: str = None,
        single_interval: int = None,
        super_password: str = None,
        upgrade_file_url: str = None,
    ):
        self.associated_device_id = associated_device_id
        self.associated_ipaddr = associated_ipaddr
        self.associated_port = associated_port
        self.associated_verification_enable = associated_verification_enable
        self.barrier_command = barrier_command
        self.check_enabled = check_enabled
        self.command_type = command_type
        self.double_verification_group_enabled = double_verification_group_enabled
        self.gate_ctl_status = gate_ctl_status
        self.id = id
        self.identity_number = identity_number
        self.is_proxy = is_proxy
        # Nested MiFare-card settings.
        self.mi_fare_card = mi_fare_card
        self.name = name
        self.reboot_device = reboot_device
        self.single_interval = single_interval
        self.super_password = super_password
        self.upgrade_file_url = upgrade_file_url

    def validate(self):
        card = self.mi_fare_card
        if card:
            card.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for key, value in (
            ('AssociatedDeviceId', self.associated_device_id),
            ('AssociatedIPAddr', self.associated_ipaddr),
            ('AssociatedPort', self.associated_port),
            ('AssociatedVerificationEnable', self.associated_verification_enable),
            ('BarrierCommand', self.barrier_command),
            ('CheckEnabled', self.check_enabled),
            ('CommandType', self.command_type),
            ('DoubleVerificationGroupEnabled', self.double_verification_group_enabled),
            ('GateCtlStatus', self.gate_ctl_status),
            ('Id', self.id),
            ('IdentityNumber', self.identity_number),
            ('IsProxy', self.is_proxy),
            ('MiFareCard', self.mi_fare_card),
            ('Name', self.name),
            ('RebootDevice', self.reboot_device),
            ('SingleInterval', self.single_interval),
            ('SuperPassword', self.super_password),
            ('UpgradeFileURL', self.upgrade_file_url),
        ):
            if value is not None:
                # The nested model is flattened via its own to_map().
                result[key] = value.to_map() if key == 'MiFareCard' else value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('AssociatedDeviceId', 'associated_device_id'),
            ('AssociatedIPAddr', 'associated_ipaddr'),
            ('AssociatedPort', 'associated_port'),
            ('AssociatedVerificationEnable', 'associated_verification_enable'),
            ('BarrierCommand', 'barrier_command'),
            ('CheckEnabled', 'check_enabled'),
            ('CommandType', 'command_type'),
            ('DoubleVerificationGroupEnabled', 'double_verification_group_enabled'),
            ('GateCtlStatus', 'gate_ctl_status'),
            ('Id', 'id'),
            ('IdentityNumber', 'identity_number'),
            ('IsProxy', 'is_proxy'),
            ('Name', 'name'),
            ('RebootDevice', 'reboot_device'),
            ('SingleInterval', 'single_interval'),
            ('SuperPassword', 'super_password'),
            ('UpgradeFileURL', 'upgrade_file_url'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        if m.get('MiFareCard') is not None:
            self.mi_fare_card = ControlAiotDeviceRequestMiFareCard().from_map(m['MiFareCard'])
        return self
class ControlAiotDeviceResponseBody(TeaModel):
    """Body of the ControlAiotDevice response (code/message/request id only)."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # ID of the request.
        self.request_id = request_id

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        pairs = (
            ('Code', self.code),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        # Serialize only attributes that were actually set.
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ControlAiotDeviceResponse(TeaModel):
    """HTTP-level wrapper for ControlAiotDevice: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ControlAiotDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three members are mandatory on a materialized response.
        for value, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status = m.get('statusCode')
        if status is not None:
            self.status_code = status
        if m.get('body') is not None:
            self.body = ControlAiotDeviceResponseBody().from_map(m['body'])
        return self
class CreateAIInstanceRequestDataSourceTimes(TeaModel):
    """A single video time range inside a CreateAIInstance request."""

    def __init__(
        self,
        end_time: str = None,
        start_time: str = None,
    ):
        # Video end time.
        self.end_time = end_time
        # Video start time.
        self.start_time = start_time

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        pairs = (('EndTime', self.end_time), ('StartTime', self.start_time))
        # Serialize only attributes that were actually set.
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (('EndTime', 'end_time'), ('StartTime', 'start_time')):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateAIInstanceRequestScheduleTimes(TeaModel):
    """A single schedule execution window inside a CreateAIInstance request."""

    def __init__(
        self,
        end_time: str = None,
        start_time: str = None,
    ):
        # End of the execution window.
        self.end_time = end_time
        # Start of the execution window.
        self.start_time = start_time

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        pairs = (('EndTime', self.end_time), ('StartTime', self.start_time))
        # Serialize only attributes that were actually set.
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (('EndTime', 'end_time'), ('StartTime', 'start_time')):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateAIInstanceRequest(TeaModel):
    """Request model for CreateAIInstance, carrying scheduling and data-source settings."""

    def __init__(
        self,
        algorithm_id: str = None,
        algorithm_name: str = None,
        compute_type: str = None,
        data_source: str = None,
        data_source_times: List[CreateAIInstanceRequestDataSourceTimes] = None,
        data_type: str = None,
        instance_name: str = None,
        instance_type: str = None,
        project_id: str = None,
        schedule_cycle_dates: List[int] = None,
        schedule_times: List[CreateAIInstanceRequestScheduleTimes] = None,
        schedule_type: str = None,
        spf: int = None,
    ):
        # Algorithm (operator) ID.
        self.algorithm_id = algorithm_id
        # Algorithm name.
        self.algorithm_name = algorithm_name
        # Compute type.
        self.compute_type = compute_type
        # Data source.
        self.data_source = data_source
        # Time ranges of the data source.
        self.data_source_times = data_source_times
        # Data type.
        self.data_type = data_type
        # Instance name.
        self.instance_name = instance_name
        # Instance type.
        self.instance_type = instance_type
        # ID of the owning project.
        self.project_id = project_id
        # Required when ScheduleType is EVERY_WEEK or EVERY_MONTH.
        self.schedule_cycle_dates = schedule_cycle_dates
        # Execution windows; multiple ranges are allowed but must not overlap.
        self.schedule_times = schedule_times
        # Schedule type.
        self.schedule_type = schedule_type
        # Seconds per sampled frame, range [0, 3600]; 0 disables frame sampling.
        self.spf = spf

    def validate(self):
        for item in self.data_source_times or ():
            if item:
                item.validate()
        for item in self.schedule_times or ():
            if item:
                item.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for key, value in (
            ('AlgorithmId', self.algorithm_id),
            ('AlgorithmName', self.algorithm_name),
            ('ComputeType', self.compute_type),
            ('DataSource', self.data_source),
        ):
            if value is not None:
                result[key] = value
        # List keys are always present, even when the attribute is unset.
        result['DataSourceTimes'] = []
        if self.data_source_times is not None:
            result['DataSourceTimes'] = [
                item.to_map() if item else None for item in self.data_source_times
            ]
        for key, value in (
            ('DataType', self.data_type),
            ('InstanceName', self.instance_name),
            ('InstanceType', self.instance_type),
            ('ProjectId', self.project_id),
            ('ScheduleCycleDates', self.schedule_cycle_dates),
        ):
            if value is not None:
                result[key] = value
        result['ScheduleTimes'] = []
        if self.schedule_times is not None:
            result['ScheduleTimes'] = [
                item.to_map() if item else None for item in self.schedule_times
            ]
        for key, value in (('ScheduleType', self.schedule_type), ('Spf', self.spf)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('AlgorithmId', 'algorithm_id'),
            ('AlgorithmName', 'algorithm_name'),
            ('ComputeType', 'compute_type'),
            ('DataSource', 'data_source'),
            ('DataType', 'data_type'),
            ('InstanceName', 'instance_name'),
            ('InstanceType', 'instance_type'),
            ('ProjectId', 'project_id'),
            ('ScheduleCycleDates', 'schedule_cycle_dates'),
            ('ScheduleType', 'schedule_type'),
            ('Spf', 'spf'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        self.data_source_times = []
        if m.get('DataSourceTimes') is not None:
            self.data_source_times = [
                CreateAIInstanceRequestDataSourceTimes().from_map(item)
                for item in m.get('DataSourceTimes')
            ]
        self.schedule_times = []
        if m.get('ScheduleTimes') is not None:
            self.schedule_times = [
                CreateAIInstanceRequestScheduleTimes().from_map(item)
                for item in m.get('ScheduleTimes')
            ]
        return self
class CreateAIInstanceShrinkRequest(TeaModel):
    """Shrunk variant of CreateAIInstanceRequest: list fields are pre-serialized strings."""

    def __init__(
        self,
        algorithm_id: str = None,
        algorithm_name: str = None,
        compute_type: str = None,
        data_source: str = None,
        data_source_times_shrink: str = None,
        data_type: str = None,
        instance_name: str = None,
        instance_type: str = None,
        project_id: str = None,
        schedule_cycle_dates_shrink: str = None,
        schedule_times_shrink: str = None,
        schedule_type: str = None,
        spf: int = None,
    ):
        # Algorithm (operator) ID.
        self.algorithm_id = algorithm_id
        # Algorithm name.
        self.algorithm_name = algorithm_name
        # Compute type.
        self.compute_type = compute_type
        # Data source.
        self.data_source = data_source
        # Serialized data-source time ranges (wire key 'DataSourceTimes').
        self.data_source_times_shrink = data_source_times_shrink
        # Data type.
        self.data_type = data_type
        # Instance name.
        self.instance_name = instance_name
        # Instance type.
        self.instance_type = instance_type
        # ID of the owning project.
        self.project_id = project_id
        # Required when ScheduleType is EVERY_WEEK or EVERY_MONTH (wire key 'ScheduleCycleDates').
        self.schedule_cycle_dates_shrink = schedule_cycle_dates_shrink
        # Serialized execution windows; ranges must not overlap (wire key 'ScheduleTimes').
        self.schedule_times_shrink = schedule_times_shrink
        # Schedule type.
        self.schedule_type = schedule_type
        # Seconds per sampled frame, range [0, 3600]; 0 disables frame sampling.
        self.spf = spf

    def validate(self):
        # Generated model: no local constraints.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        pairs = (
            ('AlgorithmId', self.algorithm_id),
            ('AlgorithmName', self.algorithm_name),
            ('ComputeType', self.compute_type),
            ('DataSource', self.data_source),
            ('DataSourceTimes', self.data_source_times_shrink),
            ('DataType', self.data_type),
            ('InstanceName', self.instance_name),
            ('InstanceType', self.instance_type),
            ('ProjectId', self.project_id),
            ('ScheduleCycleDates', self.schedule_cycle_dates_shrink),
            ('ScheduleTimes', self.schedule_times_shrink),
            ('ScheduleType', self.schedule_type),
            ('Spf', self.spf),
        )
        # Serialize only attributes that were actually set.
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('AlgorithmId', 'algorithm_id'),
            ('AlgorithmName', 'algorithm_name'),
            ('ComputeType', 'compute_type'),
            ('DataSource', 'data_source'),
            ('DataSourceTimes', 'data_source_times_shrink'),
            ('DataType', 'data_type'),
            ('InstanceName', 'instance_name'),
            ('InstanceType', 'instance_type'),
            ('ProjectId', 'project_id'),
            ('ScheduleCycleDates', 'schedule_cycle_dates_shrink'),
            ('ScheduleTimes', 'schedule_times_shrink'),
            ('ScheduleType', 'schedule_type'),
            ('Spf', 'spf'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class CreateAIInstanceResponseBodyData(TeaModel):
    """Payload (``Data``) of the CreateAIInstance response."""

    # (wire key, attribute name) pairs, in wire order.
    _FIELDS = (
        ('AcuUsed', 'acu_used'),
        ('InstanceId', 'instance_id'),
    )

    def __init__(
        self,
        acu_used: int = None,
        instance_id: str = None,
    ):
        # Current ACU usage of the instance.
        self.acu_used = acu_used
        # Instance ID.
        self.instance_id = instance_id

    def validate(self):
        """No field-level constraints."""

    def to_map(self):
        """Serialize to a dict, skipping unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; missing/None keys leave fields untouched."""
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateAIInstanceResponseBody(TeaModel):
    """Top-level body of the CreateAIInstance response."""

    def __init__(
        self,
        code: str = None,
        data: CreateAIInstanceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        # Response code.
        self.code = code
        # Returned payload.
        self.data = data
        # Error message, if any.
        self.message = message
        # Request ID.
        self.request_id = request_id

    def validate(self):
        """Recursively validate the nested payload when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a dict; the nested payload is serialized recursively."""
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; 'Data' is deserialized into its model class."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = CreateAIInstanceResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class CreateAIInstanceResponse(TeaModel):
    """HTTP-level wrapper (headers, status code, body) for CreateAIInstance."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateAIInstanceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict using lowerCamel HTTP-level keys."""
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; 'body' is deserialized into its model class."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateAIInstanceResponseBody().from_map(m['body'])
        return self
class CreateComputeInstanceRequestAlgorithms(TeaModel):
    """One algorithm entry of a CreateComputeInstance request."""

    # (wire key, attribute name) pairs, in wire order.
    _FIELDS = (
        ('AlgorithmId', 'algorithm_id'),
        ('AlgorithmName', 'algorithm_name'),
    )

    def __init__(
        self,
        algorithm_id: str = None,
        algorithm_name: str = None,
    ):
        self.algorithm_id = algorithm_id
        self.algorithm_name = algorithm_name

    def validate(self):
        """No field-level constraints."""

    def to_map(self):
        """Serialize to a dict, skipping unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; missing/None keys leave fields untouched."""
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateComputeInstanceRequestDevices(TeaModel):
    """One device entry of a CreateComputeInstance request."""

    # (wire key, attribute name) pairs, in wire order.
    _FIELDS = (
        ('BitRate', 'bit_rate'),
        ('CodingFormat', 'coding_format'),
        ('DeviceId', 'device_id'),
        ('RegionId', 'region_id'),
        ('ResolvingPower', 'resolving_power'),
        ('StartStream', 'start_stream'),
    )

    def __init__(
        self,
        bit_rate: str = None,
        coding_format: str = None,
        device_id: str = None,
        region_id: str = None,
        resolving_power: str = None,
        start_stream: bool = None,
    ):
        self.bit_rate = bit_rate
        self.coding_format = coding_format
        self.device_id = device_id
        self.region_id = region_id
        self.resolving_power = resolving_power
        self.start_stream = start_stream

    def validate(self):
        """No field-level constraints."""

    def to_map(self):
        """Serialize to a dict, skipping unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; missing/None keys leave fields untouched."""
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateComputeInstanceRequest(TeaModel):
    """Request model for CreateComputeInstance.

    ``algorithms`` and ``devices`` carry nested models; every other field is a
    scalar serialized verbatim under its wire key.
    """

    def __init__(
        self,
        acu_used: int = None,
        algorithm_type: str = None,
        algorithms: List[CreateComputeInstanceRequestAlgorithms] = None,
        compute_picture_type: str = None,
        compute_picture_value: str = None,
        datasource_type: str = None,
        devices: List[CreateComputeInstanceRequestDevices] = None,
        instance_name: str = None,
        is_frame_extraction: str = None,
        is_polling: bool = None,
        overall_execution_time: str = None,
        pic_topic: str = None,
        pic_type: str = None,
        polling_configs: str = None,
        project_id: str = None,
        schedule_cycle_date: str = None,
        schedule_day: str = None,
        schedule_day_size: str = None,
        schedule_times: str = None,
        schedule_type: str = None,
        slice_execution_time: str = None,
        storage_used: str = None,
    ):
        self.acu_used = acu_used
        self.algorithm_type = algorithm_type
        self.algorithms = algorithms
        self.compute_picture_type = compute_picture_type
        self.compute_picture_value = compute_picture_value
        self.datasource_type = datasource_type
        self.devices = devices
        self.instance_name = instance_name
        self.is_frame_extraction = is_frame_extraction
        self.is_polling = is_polling
        self.overall_execution_time = overall_execution_time
        self.pic_topic = pic_topic
        self.pic_type = pic_type
        self.polling_configs = polling_configs
        self.project_id = project_id
        self.schedule_cycle_date = schedule_cycle_date
        self.schedule_day = schedule_day
        self.schedule_day_size = schedule_day_size
        self.schedule_times = schedule_times
        self.schedule_type = schedule_type
        self.slice_execution_time = slice_execution_time
        self.storage_used = storage_used

    def validate(self):
        """Recursively validate every nested algorithm and device entry."""
        for entry in (self.algorithms or []):
            if entry:
                entry.validate()
        for entry in (self.devices or []):
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize to a request dict.

        Scalars are skipped when None; 'Algorithms' and 'Devices' are always
        emitted (possibly as empty lists), mirroring the generated wire format.
        """
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        for key, value in (
            ('AcuUsed', self.acu_used),
            ('AlgorithmType', self.algorithm_type),
        ):
            if value is not None:
                out[key] = value
        out['Algorithms'] = [k.to_map() if k else None
                             for k in (self.algorithms or [])]
        for key, value in (
            ('ComputePictureType', self.compute_picture_type),
            ('ComputePictureValue', self.compute_picture_value),
            ('DatasourceType', self.datasource_type),
        ):
            if value is not None:
                out[key] = value
        out['Devices'] = [k.to_map() if k else None
                          for k in (self.devices or [])]
        for key, value in (
            ('InstanceName', self.instance_name),
            ('IsFrameExtraction', self.is_frame_extraction),
            ('IsPolling', self.is_polling),
            ('OverallExecutionTime', self.overall_execution_time),
            ('PicTopic', self.pic_topic),
            ('PicType', self.pic_type),
            ('PollingConfigs', self.polling_configs),
            ('ProjectId', self.project_id),
            ('ScheduleCycleDate', self.schedule_cycle_date),
            ('ScheduleDay', self.schedule_day),
            ('ScheduleDaySize', self.schedule_day_size),
            ('ScheduleTimes', self.schedule_times),
            ('ScheduleType', self.schedule_type),
            ('SliceExecutionTime', self.slice_execution_time),
            ('StorageUsed', self.storage_used),
        ):
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate from a request dict; the two list fields are always reset."""
        m = m or dict()
        for key, attr in (
            ('AcuUsed', 'acu_used'),
            ('AlgorithmType', 'algorithm_type'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.algorithms = []
        for item in (m.get('Algorithms') or []):
            self.algorithms.append(
                CreateComputeInstanceRequestAlgorithms().from_map(item))
        for key, attr in (
            ('ComputePictureType', 'compute_picture_type'),
            ('ComputePictureValue', 'compute_picture_value'),
            ('DatasourceType', 'datasource_type'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.devices = []
        for item in (m.get('Devices') or []):
            self.devices.append(
                CreateComputeInstanceRequestDevices().from_map(item))
        for key, attr in (
            ('InstanceName', 'instance_name'),
            ('IsFrameExtraction', 'is_frame_extraction'),
            ('IsPolling', 'is_polling'),
            ('OverallExecutionTime', 'overall_execution_time'),
            ('PicTopic', 'pic_topic'),
            ('PicType', 'pic_type'),
            ('PollingConfigs', 'polling_configs'),
            ('ProjectId', 'project_id'),
            ('ScheduleCycleDate', 'schedule_cycle_date'),
            ('ScheduleDay', 'schedule_day'),
            ('ScheduleDaySize', 'schedule_day_size'),
            ('ScheduleTimes', 'schedule_times'),
            ('ScheduleType', 'schedule_type'),
            ('SliceExecutionTime', 'slice_execution_time'),
            ('StorageUsed', 'storage_used'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateComputeInstanceShrinkRequest(TeaModel):
    """Shrunk variant of CreateComputeInstanceRequest.

    The nested lists are pre-serialized into the string attributes
    ``algorithms_shrink`` / ``devices_shrink``, which map to the plain
    'Algorithms' / 'Devices' wire keys.
    """

    # (wire key, attribute name) pairs, in wire order.
    _FIELDS = (
        ('AcuUsed', 'acu_used'),
        ('AlgorithmType', 'algorithm_type'),
        ('Algorithms', 'algorithms_shrink'),
        ('ComputePictureType', 'compute_picture_type'),
        ('ComputePictureValue', 'compute_picture_value'),
        ('DatasourceType', 'datasource_type'),
        ('Devices', 'devices_shrink'),
        ('InstanceName', 'instance_name'),
        ('IsFrameExtraction', 'is_frame_extraction'),
        ('IsPolling', 'is_polling'),
        ('OverallExecutionTime', 'overall_execution_time'),
        ('PicTopic', 'pic_topic'),
        ('PicType', 'pic_type'),
        ('PollingConfigs', 'polling_configs'),
        ('ProjectId', 'project_id'),
        ('ScheduleCycleDate', 'schedule_cycle_date'),
        ('ScheduleDay', 'schedule_day'),
        ('ScheduleDaySize', 'schedule_day_size'),
        ('ScheduleTimes', 'schedule_times'),
        ('ScheduleType', 'schedule_type'),
        ('SliceExecutionTime', 'slice_execution_time'),
        ('StorageUsed', 'storage_used'),
    )

    def __init__(
        self,
        acu_used: int = None,
        algorithm_type: str = None,
        algorithms_shrink: str = None,
        compute_picture_type: str = None,
        compute_picture_value: str = None,
        datasource_type: str = None,
        devices_shrink: str = None,
        instance_name: str = None,
        is_frame_extraction: str = None,
        is_polling: bool = None,
        overall_execution_time: str = None,
        pic_topic: str = None,
        pic_type: str = None,
        polling_configs: str = None,
        project_id: str = None,
        schedule_cycle_date: str = None,
        schedule_day: str = None,
        schedule_day_size: str = None,
        schedule_times: str = None,
        schedule_type: str = None,
        slice_execution_time: str = None,
        storage_used: str = None,
    ):
        self.acu_used = acu_used
        self.algorithm_type = algorithm_type
        self.algorithms_shrink = algorithms_shrink
        self.compute_picture_type = compute_picture_type
        self.compute_picture_value = compute_picture_value
        self.datasource_type = datasource_type
        self.devices_shrink = devices_shrink
        self.instance_name = instance_name
        self.is_frame_extraction = is_frame_extraction
        self.is_polling = is_polling
        self.overall_execution_time = overall_execution_time
        self.pic_topic = pic_topic
        self.pic_type = pic_type
        self.polling_configs = polling_configs
        self.project_id = project_id
        self.schedule_cycle_date = schedule_cycle_date
        self.schedule_day = schedule_day
        self.schedule_day_size = schedule_day_size
        self.schedule_times = schedule_times
        self.schedule_type = schedule_type
        self.slice_execution_time = slice_execution_time
        self.storage_used = storage_used

    def validate(self):
        """No field-level constraints."""

    def to_map(self):
        """Serialize to a dict, skipping unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; missing/None keys leave fields untouched."""
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateComputeInstanceResponseBodyData(TeaModel):
    """Payload (``Data``) of the CreateComputeInstance response."""

    # (wire key, attribute name) pairs, in wire order.
    _FIELDS = (
        ('AcuUsed', 'acu_used'),
        ('InstanceId', 'instance_id'),
    )

    def __init__(
        self,
        acu_used: int = None,
        instance_id: str = None,
    ):
        self.acu_used = acu_used
        self.instance_id = instance_id

    def validate(self):
        """No field-level constraints."""

    def to_map(self):
        """Serialize to a dict, skipping unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; missing/None keys leave fields untouched."""
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateComputeInstanceResponseBody(TeaModel):
    """Top-level body of the CreateComputeInstance response."""

    def __init__(
        self,
        code: str = None,
        data: CreateComputeInstanceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id
        self.success = success

    def validate(self):
        """Recursively validate the nested payload when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a dict; the nested payload is serialized recursively."""
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; 'Data' is deserialized into its model class."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = CreateComputeInstanceResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Success') is not None:
            self.success = m.get('Success')
        return self
class CreateComputeInstanceResponse(TeaModel):
    """HTTP-level wrapper (headers, status code, body) for CreateComputeInstance."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateComputeInstanceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict using lowerCamel HTTP-level keys."""
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; 'body' is deserialized into its model class."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateComputeInstanceResponseBody().from_map(m['body'])
        return self
class CreateCorpRequest(TeaModel):
    """Request model for CreateCorp."""

    # (wire key, attribute name) pairs, in wire order.
    _FIELDS = (
        ('AlgorithmType', 'algorithm_type'),
        ('AppName', 'app_name'),
        ('CorpName', 'corp_name'),
        ('Description', 'description'),
        ('IconPath', 'icon_path'),
        ('IsvSubId', 'isv_sub_id'),
        ('ParentCorpId', 'parent_corp_id'),
    )

    def __init__(
        self,
        algorithm_type: str = None,
        app_name: str = None,
        corp_name: str = None,
        description: str = None,
        icon_path: str = None,
        isv_sub_id: str = None,
        parent_corp_id: str = None,
    ):
        self.algorithm_type = algorithm_type
        self.app_name = app_name
        self.corp_name = corp_name
        self.description = description
        self.icon_path = icon_path
        self.isv_sub_id = isv_sub_id
        self.parent_corp_id = parent_corp_id

    def validate(self):
        """No field-level constraints."""

    def to_map(self):
        """Serialize to a dict, skipping unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; missing/None keys leave fields untouched."""
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateCorpResponseBody(TeaModel):
    """Body of the CreateCorp response."""

    # (wire key, attribute name) pairs, in wire order.
    _FIELDS = (
        ('Code', 'code'),
        ('CorpId', 'corp_id'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
    )

    def __init__(
        self,
        code: str = None,
        corp_id: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.corp_id = corp_id
        self.message = message
        self.request_id = request_id

    def validate(self):
        """No field-level constraints."""

    def to_map(self):
        """Serialize to a dict, skipping unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; missing/None keys leave fields untouched."""
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateCorpResponse(TeaModel):
    """HTTP-level wrapper (headers, status code, body) for CreateCorp."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateCorpResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict using lowerCamel HTTP-level keys."""
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; 'body' is deserialized into its model class."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateCorpResponseBody().from_map(m['body'])
        return self
class CreateCorpGroupRequest(TeaModel):
    """Request model for CreateCorpGroup."""

    # (wire key, attribute name) pairs, in wire order.
    _FIELDS = (
        ('ClientToken', 'client_token'),
        ('CorpId', 'corp_id'),
        ('GroupId', 'group_id'),
    )

    def __init__(
        self,
        client_token: str = None,
        corp_id: str = None,
        group_id: str = None,
    ):
        self.client_token = client_token
        self.corp_id = corp_id
        self.group_id = group_id

    def validate(self):
        """No field-level constraints."""

    def to_map(self):
        """Serialize to a dict, skipping unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; missing/None keys leave fields untouched."""
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateCorpGroupResponseBody(TeaModel):
    """Body of the CreateCorpGroup response."""

    # (wire key, attribute name) pairs, in wire order.
    _FIELDS = (
        ('Code', 'code'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
        ('Success', 'success'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.message = message
        self.request_id = request_id
        self.success = success

    def validate(self):
        """No field-level constraints."""

    def to_map(self):
        """Serialize to a dict, skipping unset (None) fields."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; missing/None keys leave fields untouched."""
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateCorpGroupResponse(TeaModel):
    """HTTP-level wrapper (headers, status code, body) for CreateCorpGroup."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateCorpGroupResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict using lowerCamel HTTP-level keys."""
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate from a dict; 'body' is deserialized into its model class."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateCorpGroupResponseBody().from_map(m['body'])
        return self
class CreateDeviceRequest(TeaModel):
def __init__(
self,
activate_code: str = None,
audio_enable: str = None,
city_code: str = None,
corp_id: str = None,
data_source_type: str = None,
device_address: str = None,
device_direction: str = None,
device_id: str = None,
device_model: str = None,
device_name: str = None,
device_rate: str = None,
device_resolution: str = None,
device_site: str = None,
device_sn: str = None,
device_type: str = None,
encode_format: str = None,
frame_rate: str = None,
gov_length: str = None,
in_protocol: str = None,
latitude: str = None,
longitude: str = None,
osdtime_enable: str = None,
osdtime_type: str = None,
osdtime_x: str = None,
osdtime_y: str = None,
parent_device_id: str = None,
sub_device_count: int = None,
sub_device_id_list: str = None,
up_stream_mode: str = None,
vendor: str = None,
):
self.activate_code = activate_code
self.audio_enable = audio_enable
self.city_code = city_code
self.corp_id = corp_id
self.data_source_type = data_source_type
self.device_address = device_address
self.device_direction = device_direction
self.device_id = device_id
self.device_model = device_model
self.device_name = device_name
self.device_rate = device_rate
self.device_resolution = device_resolution
self.device_site = device_site
self.device_sn = device_sn
self.device_type = device_type
self.encode_format = encode_format
self.frame_rate = frame_rate
self.gov_length = gov_length
self.in_protocol = in_protocol
self.latitude = latitude
self.longitude = longitude
self.osdtime_enable = osdtime_enable
self.osdtime_type = osdtime_type
self.osdtime_x = osdtime_x
self.osdtime_y = osdtime_y
self.parent_device_id = parent_device_id
self.sub_device_count = sub_device_count
self.sub_device_id_list = sub_device_id_list
self.up_stream_mode = up_stream_mode
self.vendor = vendor
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.activate_code is not None:
result['ActivateCode'] = self.activate_code
if self.audio_enable is not None:
result['AudioEnable'] = self.audio_enable
if self.city_code is not None:
result['CityCode'] = self.city_code
if self.corp_id is not None:
result['CorpId'] = self.corp_id
if self.data_source_type is not None:
result['DataSourceType'] = self.data_source_type
if self.device_address is not None:
result['DeviceAddress'] = self.device_address
if self.device_direction is not None:
result['DeviceDirection'] = self.device_direction
if self.device_id is not None:
result['DeviceId'] = self.device_id
if self.device_model is not None:
result['DeviceModel'] = self.device_model
if self.device_name is not None:
result['DeviceName'] = self.device_name
if self.device_rate is not None:
result['DeviceRate'] = self.device_rate
if self.device_resolution is not None:
result['DeviceResolution'] = self.device_resolution
if self.device_site is not None:
result['DeviceSite'] = self.device_site
if self.device_sn is not None:
result['DeviceSn'] = self.device_sn
if self.device_type is not None:
result['DeviceType'] = self.device_type
if self.encode_format is not None:
result['EncodeFormat'] = self.encode_format
if self.frame_rate is not None:
result['FrameRate'] = self.frame_rate
if self.gov_length is not None:
result['GovLength'] = self.gov_length
if self.in_protocol is not None:
result['InProtocol'] = self.in_protocol
if self.latitude is not None:
result['Latitude'] = self.latitude
if self.longitude is not None:
result['Longitude'] = self.longitude
if self.osdtime_enable is not None:
result['OSDTimeEnable'] = self.osdtime_enable
if self.osdtime_type is not None:
result['OSDTimeType'] = self.osdtime_type
if self.osdtime_x is not None:
result['OSDTimeX'] = self.osdtime_x
if self.osdtime_y is not None:
result['OSDTimeY'] = self.osdtime_y
if self.parent_device_id is not None:
result['ParentDeviceId'] = self.parent_device_id
if self.sub_device_count is not None:
result['SubDeviceCount'] = self.sub_device_count
if self.sub_device_id_list is not None:
result['SubDeviceIdList'] = self.sub_device_id_list
if self.up_stream_mode is not None:
result['UpStreamMode'] = self.up_stream_mode
if self.vendor is not None:
result['Vendor'] = self.vendor
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('ActivateCode') is not None:
self.activate_code = m.get('ActivateCode')
if m.get('AudioEnable') is not None:
self.audio_enable = m.get('AudioEnable')
if m.get('CityCode') is not None:
self.city_code = m.get('CityCode')
if m.get('CorpId') is not None:
self.corp_id = m.get('CorpId')
if m.get('DataSourceType') is not None:
self.data_source_type = m.get('DataSourceType')
if m.get('DeviceAddress') is not None:
self.device_address = m.get('DeviceAddress')
if m.get('DeviceDirection') is not None:
self.device_direction = m.get('DeviceDirection')
if m.get('DeviceId') is not None:
self.device_id = m.get('DeviceId')
if m.get('DeviceModel') is not None:
self.device_model = m.get('DeviceModel')
if m.get('DeviceName') is not None:
self.device_name = m.get('DeviceName')
if m.get('DeviceRate') is not None:
self.device_rate = m.get('DeviceRate')
if m.get('DeviceResolution') is not None:
self.device_resolution = m.get('DeviceResolution')
if m.get('DeviceSite') is not None:
self.device_site = m.get('DeviceSite')
if m.get('DeviceSn') is not None:
self.device_sn = m.get('DeviceSn')
if m.get('DeviceType') is not None:
self.device_type = m.get('DeviceType')
if m.get('EncodeFormat') is not None:
self.encode_format = m.get('EncodeFormat')
if m.get('FrameRate') is not None:
self.frame_rate = m.get('FrameRate')
if m.get('GovLength') is not None:
self.gov_length = m.get('GovLength')
if m.get('InProtocol') is not None:
self.in_protocol = m.get('InProtocol')
if m.get('Latitude') is not None:
self.latitude = m.get('Latitude')
if m.get('Longitude') is not None:
self.longitude = m.get('Longitude')
if m.get('OSDTimeEnable') is not None:
self.osdtime_enable = m.get('OSDTimeEnable')
if m.get('OSDTimeType') is not None:
self.osdtime_type = m.get('OSDTimeType')
if m.get('OSDTimeX') is not None:
self.osdtime_x = m.get('OSDTimeX')
if m.get('OSDTimeY') is not None:
self.osdtime_y = m.get('OSDTimeY')
if m.get('ParentDeviceId') is not None:
self.parent_device_id = m.get('ParentDeviceId')
if m.get('SubDeviceCount') is not None:
self.sub_device_count = m.get('SubDeviceCount')
if m.get('SubDeviceIdList') is not None:
self.sub_device_id_list = m.get('SubDeviceIdList')
if m.get('UpStreamMode') is not None:
self.up_stream_mode = m.get('UpStreamMode')
if m.get('Vendor') is not None:
self.vendor = m.get('Vendor')
return self
class CreateDeviceResponseBodyDataSubDeviceInfo(TeaModel):
    """One sub-device entry inside a CreateDevice response payload."""

    # (attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('sub_device_id', 'SubDeviceId'),
    )

    def __init__(
        self,
        sub_device_id: str = None,
    ):
        self.sub_device_id = sub_device_id

    def validate(self):
        pass

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateDeviceResponseBodyData(TeaModel):
    """Payload (`Data`) of the CreateDevice response body."""

    # Scalar fields, in wire order: (attribute, wire key).
    _SCALARS = (
        ('device_id', 'DeviceId'),
        ('password', 'Password'),
        ('server_id', 'ServerId'),
        ('server_ip', 'ServerIp'),
        ('server_port', 'ServerPort'),
        ('server_realm', 'ServerRealm'),
    )

    def __init__(
        self,
        device_id: str = None,
        password: str = None,
        server_id: str = None,
        server_ip: str = None,
        server_port: str = None,
        server_realm: str = None,
        sub_device_info: List[CreateDeviceResponseBodyDataSubDeviceInfo] = None,
    ):
        self.device_id = device_id
        self.password = password
        self.server_id = server_id
        self.server_ip = server_ip
        self.server_port = server_port
        self.server_realm = server_realm
        self.sub_device_info = sub_device_info

    def validate(self):
        # Cascade validation into each nested sub-device model.
        for item in self.sub_device_info or []:
            if item:
                item.validate()

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        for attr, key in self._SCALARS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        # 'SubDeviceInfo' is always emitted, even when the list is unset.
        out['SubDeviceInfo'] = []
        if self.sub_device_info is not None:
            for item in self.sub_device_info:
                out['SubDeviceInfo'].append(item.to_map() if item else None)
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._SCALARS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        # The list attribute is unconditionally reset before repopulating.
        self.sub_device_info = []
        if m.get('SubDeviceInfo') is not None:
            for raw in m.get('SubDeviceInfo'):
                self.sub_device_info.append(
                    CreateDeviceResponseBodyDataSubDeviceInfo().from_map(raw))
        return self
class CreateDeviceResponseBody(TeaModel):
    """Top-level body of the CreateDevice API response."""

    def __init__(
        self,
        code: str = None,
        data: CreateDeviceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = CreateDeviceResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class CreateDeviceResponse(TeaModel):
    """HTTP-level wrapper for CreateDevice: headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory for a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateDeviceResponseBody().from_map(m['body'])
        return self
class CreateModelServiceRequest(TeaModel):
    """Request parameters for the CreateModelService API."""

    # (attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('algorithm_code', 'AlgorithmCode'),
        ('client_token', 'ClientToken'),
        ('instance_name', 'InstanceName'),
        ('qpsrequired', 'QPSRequired'),
    )

    def __init__(
        self,
        algorithm_code: str = None,
        client_token: str = None,
        instance_name: str = None,
        qpsrequired: int = None,
    ):
        self.algorithm_code = algorithm_code
        self.client_token = client_token
        self.instance_name = instance_name
        self.qpsrequired = qpsrequired

    def validate(self):
        pass

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateModelServiceResponseBodyDataModelApiList(TeaModel):
    """One API entry in the model service's API list."""

    # (attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('algorithm_api_code', 'AlgorithmApiCode'),
        ('api_id', 'ApiId'),
        ('api_name', 'ApiName'),
        ('api_path', 'ApiPath'),
        ('create_time', 'CreateTime'),
    )

    def __init__(
        self,
        algorithm_api_code: str = None,
        api_id: str = None,
        api_name: str = None,
        api_path: str = None,
        create_time: str = None,
    ):
        self.algorithm_api_code = algorithm_api_code
        self.api_id = api_id
        self.api_name = api_name
        self.api_path = api_path
        self.create_time = create_time

    def validate(self):
        pass

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateModelServiceResponseBodyData(TeaModel):
    """Payload (`Data`) of the CreateModelService response body."""

    # Scalar fields that follow the ModelApiList entry in wire order.
    _TRAILING_SCALARS = (
        ('model_service_instance_id', 'ModelServiceInstanceId'),
        ('model_service_status', 'ModelServiceStatus'),
        ('qps_required', 'QpsRequired'),
    )

    def __init__(
        self,
        app_code: str = None,
        model_api_list: List[CreateModelServiceResponseBodyDataModelApiList] = None,
        model_service_instance_id: str = None,
        model_service_status: str = None,
        qps_required: int = None,
    ):
        self.app_code = app_code
        self.model_api_list = model_api_list
        self.model_service_instance_id = model_service_instance_id
        self.model_service_status = model_service_status
        self.qps_required = qps_required

    def validate(self):
        # Cascade validation into each nested API model.
        for item in self.model_api_list or []:
            if item:
                item.validate()

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        if self.app_code is not None:
            out['AppCode'] = self.app_code
        # 'ModelApiList' is always emitted, even when the list is unset.
        out['ModelApiList'] = []
        if self.model_api_list is not None:
            for item in self.model_api_list:
                out['ModelApiList'].append(item.to_map() if item else None)
        for attr, key in self._TRAILING_SCALARS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('AppCode') is not None:
            self.app_code = m.get('AppCode')
        # The list attribute is unconditionally reset before repopulating.
        self.model_api_list = []
        if m.get('ModelApiList') is not None:
            for raw in m.get('ModelApiList'):
                self.model_api_list.append(
                    CreateModelServiceResponseBodyDataModelApiList().from_map(raw))
        for attr, key in self._TRAILING_SCALARS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateModelServiceResponseBody(TeaModel):
    """Top-level body of the CreateModelService API response."""

    def __init__(
        self,
        code: str = None,
        data: CreateModelServiceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = CreateModelServiceResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class CreateModelServiceResponse(TeaModel):
    """HTTP-level wrapper for CreateModelService: headers, status code, body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateModelServiceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory for a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateModelServiceResponseBody().from_map(m['body'])
        return self
class CreateNewDeviceRequest(TeaModel):
    """Request parameters for the CreateNewDevice API."""

    # (attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('city_code', 'CityCode'),
        ('corp_id', 'CorpId'),
        ('data_source_type', 'DataSourceType'),
        ('device_address', 'DeviceAddress'),
        ('device_id', 'DeviceId'),
        ('device_model', 'DeviceModel'),
        ('device_name', 'DeviceName'),
        ('device_type', 'DeviceType'),
        ('file_path', 'FilePath'),
        ('in_protocol', 'InProtocol'),
        ('latitude', 'Latitude'),
        ('longitude', 'Longitude'),
        ('sub_device_count', 'SubDeviceCount'),
        ('vendor', 'Vendor'),
    )

    def __init__(
        self,
        city_code: str = None,
        corp_id: str = None,
        data_source_type: str = None,
        device_address: str = None,
        device_id: str = None,
        device_model: str = None,
        device_name: str = None,
        device_type: str = None,
        file_path: str = None,
        in_protocol: str = None,
        latitude: str = None,
        longitude: str = None,
        sub_device_count: int = None,
        vendor: str = None,
    ):
        self.city_code = city_code
        self.corp_id = corp_id
        self.data_source_type = data_source_type
        self.device_address = device_address
        self.device_id = device_id
        self.device_model = device_model
        self.device_name = device_name
        self.device_type = device_type
        self.file_path = file_path
        self.in_protocol = in_protocol
        self.latitude = latitude
        self.longitude = longitude
        self.sub_device_count = sub_device_count
        self.vendor = vendor

    def validate(self):
        pass

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateNewDeviceResponseBodyDataSubDeviceInfo(TeaModel):
    """One channel-device entry inside a CreateNewDevice response payload."""

    # (attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('channel_device_id', 'ChannelDeviceId'),
    )

    def __init__(
        self,
        channel_device_id: str = None,
    ):
        self.channel_device_id = channel_device_id

    def validate(self):
        pass

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateNewDeviceResponseBodyData(TeaModel):
    """Payload (`Data`) of the CreateNewDevice response body."""

    # Scalar fields, in wire order: (attribute, wire key).
    _SCALARS = (
        ('device_id', 'DeviceId'),
        ('password', 'Password'),
        ('server_id', 'ServerId'),
        ('server_ip', 'ServerIp'),
        ('server_port', 'ServerPort'),
        ('sip_realm', 'SipRealm'),
    )

    def __init__(
        self,
        device_id: str = None,
        password: str = None,
        server_id: str = None,
        server_ip: str = None,
        server_port: str = None,
        sip_realm: str = None,
        sub_device_info: List[CreateNewDeviceResponseBodyDataSubDeviceInfo] = None,
    ):
        self.device_id = device_id
        self.password = password
        self.server_id = server_id
        self.server_ip = server_ip
        self.server_port = server_port
        self.sip_realm = sip_realm
        self.sub_device_info = sub_device_info

    def validate(self):
        # Cascade validation into each nested sub-device model.
        for item in self.sub_device_info or []:
            if item:
                item.validate()

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        for attr, key in self._SCALARS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        # 'SubDeviceInfo' is always emitted, even when the list is unset.
        out['SubDeviceInfo'] = []
        if self.sub_device_info is not None:
            for item in self.sub_device_info:
                out['SubDeviceInfo'].append(item.to_map() if item else None)
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._SCALARS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        # The list attribute is unconditionally reset before repopulating.
        self.sub_device_info = []
        if m.get('SubDeviceInfo') is not None:
            for raw in m.get('SubDeviceInfo'):
                self.sub_device_info.append(
                    CreateNewDeviceResponseBodyDataSubDeviceInfo().from_map(raw))
        return self
class CreateNewDeviceResponseBody(TeaModel):
    """Top-level body of the CreateNewDevice API response."""

    def __init__(
        self,
        code: str = None,
        data: CreateNewDeviceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = CreateNewDeviceResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class CreateNewDeviceResponse(TeaModel):
    """HTTP-level wrapper for CreateNewDevice: headers, status code, body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateNewDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory for a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateNewDeviceResponseBody().from_map(m['body'])
        return self
class CreateScanDeviceRequest(TeaModel):
    """Request parameters for the CreateScanDevice API."""

    # (attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('audio_enable', 'AudioEnable'),
        ('city_code', 'CityCode'),
        ('corp_id', 'CorpId'),
        ('data_source_type', 'DataSourceType'),
        ('device_address', 'DeviceAddress'),
        ('device_direction', 'DeviceDirection'),
        ('device_id', 'DeviceId'),
        ('device_model', 'DeviceModel'),
        ('device_name', 'DeviceName'),
        ('device_rate', 'DeviceRate'),
        ('device_resolution', 'DeviceResolution'),
        ('device_site', 'DeviceSite'),
        ('device_sn', 'DeviceSn'),
        ('device_type', 'DeviceType'),
        ('encode_format', 'EncodeFormat'),
        ('frame_rate', 'FrameRate'),
        ('gov_length', 'GovLength'),
        ('in_protocol', 'InProtocol'),
        ('latitude', 'Latitude'),
        ('longitude', 'Longitude'),
        ('osdtime_enable', 'OSDTimeEnable'),
        ('osdtime_type', 'OSDTimeType'),
        ('osdtime_x', 'OSDTimeX'),
        ('osdtime_y', 'OSDTimeY'),
        ('sub_device_count', 'SubDeviceCount'),
        ('vendor', 'Vendor'),
    )

    def __init__(
        self,
        audio_enable: str = None,
        city_code: str = None,
        corp_id: str = None,
        data_source_type: str = None,
        device_address: str = None,
        device_direction: str = None,
        device_id: str = None,
        device_model: str = None,
        device_name: str = None,
        device_rate: str = None,
        device_resolution: str = None,
        device_site: str = None,
        device_sn: str = None,
        device_type: str = None,
        encode_format: str = None,
        frame_rate: str = None,
        gov_length: str = None,
        in_protocol: str = None,
        latitude: str = None,
        longitude: str = None,
        osdtime_enable: str = None,
        osdtime_type: str = None,
        osdtime_x: str = None,
        osdtime_y: str = None,
        sub_device_count: int = None,
        vendor: str = None,
    ):
        self.audio_enable = audio_enable
        self.city_code = city_code
        self.corp_id = corp_id
        self.data_source_type = data_source_type
        self.device_address = device_address
        self.device_direction = device_direction
        self.device_id = device_id
        self.device_model = device_model
        self.device_name = device_name
        self.device_rate = device_rate
        self.device_resolution = device_resolution
        self.device_site = device_site
        self.device_sn = device_sn
        self.device_type = device_type
        self.encode_format = encode_format
        self.frame_rate = frame_rate
        self.gov_length = gov_length
        self.in_protocol = in_protocol
        self.latitude = latitude
        self.longitude = longitude
        self.osdtime_enable = osdtime_enable
        self.osdtime_type = osdtime_type
        self.osdtime_x = osdtime_x
        self.osdtime_y = osdtime_y
        self.sub_device_count = sub_device_count
        self.vendor = vendor

    def validate(self):
        pass

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateScanDeviceResponseBodyDataSubDeviceInfo(TeaModel):
    """One channel-device entry inside a CreateScanDevice response payload."""

    # (attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('channel_device_id', 'ChannelDeviceId'),
    )

    def __init__(
        self,
        channel_device_id: str = None,
    ):
        self.channel_device_id = channel_device_id

    def validate(self):
        pass

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateScanDeviceResponseBodyData(TeaModel):
    """Payload (`Data`) of the CreateScanDevice response body."""

    # Scalar fields, in wire order: (attribute, wire key).
    _SCALARS = (
        ('corp_id', 'CorpId'),
        ('device_id', 'DeviceId'),
        ('device_sn', 'DeviceSn'),
        ('password', 'Password'),
        ('server_id', 'ServerId'),
        ('server_ip', 'ServerIp'),
        ('server_port', 'ServerPort'),
        ('server_realm', 'ServerRealm'),
        ('sip_device_gb_id', 'SipDeviceGbId'),
    )

    def __init__(
        self,
        corp_id: str = None,
        device_id: str = None,
        device_sn: str = None,
        password: str = None,
        server_id: str = None,
        server_ip: str = None,
        server_port: str = None,
        server_realm: str = None,
        sip_device_gb_id: str = None,
        sub_device_info: List[CreateScanDeviceResponseBodyDataSubDeviceInfo] = None,
    ):
        self.corp_id = corp_id
        self.device_id = device_id
        self.device_sn = device_sn
        self.password = password
        self.server_id = server_id
        self.server_ip = server_ip
        self.server_port = server_port
        self.server_realm = server_realm
        self.sip_device_gb_id = sip_device_gb_id
        self.sub_device_info = sub_device_info

    def validate(self):
        # Cascade validation into each nested sub-device model.
        for item in self.sub_device_info or []:
            if item:
                item.validate()

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        for attr, key in self._SCALARS:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        # 'SubDeviceInfo' is always emitted, even when the list is unset.
        out['SubDeviceInfo'] = []
        if self.sub_device_info is not None:
            for item in self.sub_device_info:
                out['SubDeviceInfo'].append(item.to_map() if item else None)
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._SCALARS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        # The list attribute is unconditionally reset before repopulating.
        self.sub_device_info = []
        if m.get('SubDeviceInfo') is not None:
            for raw in m.get('SubDeviceInfo'):
                self.sub_device_info.append(
                    CreateScanDeviceResponseBodyDataSubDeviceInfo().from_map(raw))
        return self
class CreateScanDeviceResponseBody(TeaModel):
    """Top-level body of the CreateScanDevice API response."""

    def __init__(
        self,
        code: str = None,
        data: CreateScanDeviceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = CreateScanDeviceResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class CreateScanDeviceResponse(TeaModel):
    """HTTP-level wrapper for CreateScanDevice: headers, status code, body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateScanDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory for a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateScanDeviceResponseBody().from_map(m['body'])
        return self
class CreateSearchTableRequest(TeaModel):
    """Request parameters for the CreateSearchTable API."""

    # (attribute, wire key) pairs driving to_map/from_map.
    _FIELD_MAP = (
        ('algorithm_id', 'AlgorithmId'),
        ('search_table_name', 'SearchTableName'),
        ('target_type', 'TargetType'),
    )

    def __init__(
        self,
        algorithm_id: str = None,
        search_table_name: str = None,
        target_type: str = None,
    ):
        self.algorithm_id = algorithm_id
        self.search_table_name = search_table_name
        self.target_type = target_type

    def validate(self):
        pass

    def to_map(self):
        memo = super().to_map()
        if memo is not None:
            return memo
        out = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateSearchTableResponseBodyData(TeaModel):
    """Payload of the CreateSearchTable response: the new table's id."""

    _FIELD_MAP = (('search_table_id', 'SearchTableId'),)

    def __init__(
        self,
        search_table_id: str = None,
    ):
        self.search_table_id = search_table_id

    def validate(self):
        """Nothing to validate: single optional scalar."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateSearchTableResponseBody(TeaModel):
    """Body of the CreateSearchTable response: status fields plus nested data."""

    def __init__(
        self,
        code: str = None,
        data: CreateSearchTableResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id
        self.success = success

    def validate(self):
        """Recursively validate the nested data model when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize non-None fields, delegating 'Data' to the nested model."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields (rebuilding the nested model) from a dict; returns self."""
        m = m or dict()
        for attr, key in (
            ('code', 'Code'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('Data') is not None:
            self.data = CreateSearchTableResponseBodyData().from_map(m['Data'])
        return self
class CreateSearchTableResponse(TeaModel):
    """HTTP-level wrapper for CreateSearchTable: headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateSearchTableResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three wrapper fields are required; the body validates recursively."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, delegating body serialization to the model."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Rebuild the wrapper (and nested body model) from a dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateSearchTableResponseBody().from_map(m['body'])
        return self
class CreateSubscribeRequest(TeaModel):
    """Request parameters for the CreateSubscribe API."""

    _FIELD_MAP = (
        ('device_id', 'DeviceId'),
        ('push_config', 'PushConfig'),
    )

    def __init__(
        self,
        device_id: str = None,
        push_config: str = None,
    ):
        self.device_id = device_id
        self.push_config = push_config

    def validate(self):
        """Nothing to validate: all fields are optional scalars."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateSubscribeResponseBody(TeaModel):
    """Body of the CreateSubscribe response: status fields only, no payload."""

    _FIELD_MAP = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        """Nothing to validate: all fields are optional scalars."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateSubscribeResponse(TeaModel):
    """HTTP-level wrapper for CreateSubscribe: headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateSubscribeResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three wrapper fields are required; the body validates recursively."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, delegating body serialization to the model."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Rebuild the wrapper (and nested body model) from a dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateSubscribeResponseBody().from_map(m['body'])
        return self
class CreateUserRequest(TeaModel):
    """Request parameters for the CreateUser API."""

    # (python attribute, wire key) pairs; order matches the wire-key order
    # the serialized map is expected to use.
    _FIELD_MAP = (
        ('address', 'Address'),
        ('age', 'Age'),
        ('attachment', 'Attachment'),
        ('biz_id', 'BizId'),
        ('corp_id', 'CorpId'),
        ('face_image_url', 'FaceImageUrl'),
        ('gender', 'Gender'),
        ('id_number', 'IdNumber'),
        ('isv_sub_id', 'IsvSubId'),
        ('phone_no', 'PhoneNo'),
        ('plate_no', 'PlateNo'),
        ('user_group_id', 'UserGroupId'),
        ('user_name', 'UserName'),
    )

    def __init__(
        self,
        address: str = None,
        age: int = None,
        attachment: str = None,
        biz_id: str = None,
        corp_id: str = None,
        face_image_url: str = None,
        gender: int = None,
        id_number: str = None,
        isv_sub_id: str = None,
        phone_no: str = None,
        plate_no: str = None,
        user_group_id: int = None,
        user_name: str = None,
    ):
        self.address = address
        self.age = age
        self.attachment = attachment
        self.biz_id = biz_id
        self.corp_id = corp_id
        self.face_image_url = face_image_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.user_group_id = user_group_id
        self.user_name = user_name

    def validate(self):
        """Nothing to validate: all fields are optional scalars."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateUserResponseBodyData(TeaModel):
    """Payload of the CreateUser response: the stored user record."""

    _FIELD_MAP = (
        ('address', 'Address'),
        ('age', 'Age'),
        ('attachment', 'Attachment'),
        ('biz_id', 'BizId'),
        ('face_image_url', 'FaceImageUrl'),
        ('gender', 'Gender'),
        ('id_number', 'IdNumber'),
        ('isv_sub_id', 'IsvSubId'),
        ('phone_no', 'PhoneNo'),
        ('plate_no', 'PlateNo'),
        ('user_group_id', 'UserGroupId'),
        ('user_id', 'UserId'),
        ('user_name', 'UserName'),
    )

    def __init__(
        self,
        address: str = None,
        age: str = None,
        attachment: str = None,
        biz_id: str = None,
        face_image_url: str = None,
        gender: str = None,
        id_number: str = None,
        isv_sub_id: str = None,
        phone_no: str = None,
        plate_no: str = None,
        user_group_id: int = None,
        user_id: int = None,
        user_name: str = None,
    ):
        self.address = address
        self.age = age
        self.attachment = attachment
        self.biz_id = biz_id
        self.face_image_url = face_image_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.user_group_id = user_group_id
        self.user_id = user_id
        self.user_name = user_name

    def validate(self):
        """Nothing to validate: all fields are optional scalars."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateUserResponseBody(TeaModel):
    """Body of the CreateUser response: status fields plus nested data."""

    def __init__(
        self,
        code: str = None,
        data: CreateUserResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Recursively validate the nested data model when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize non-None fields, delegating 'Data' to the nested model."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields (rebuilding the nested model) from a dict; returns self."""
        m = m or dict()
        for attr, key in (
            ('code', 'Code'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('Data') is not None:
            self.data = CreateUserResponseBodyData().from_map(m['Data'])
        return self
class CreateUserResponse(TeaModel):
    """HTTP-level wrapper for CreateUser: headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateUserResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three wrapper fields are required; the body validates recursively."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, delegating body serialization to the model."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Rebuild the wrapper (and nested body model) from a dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateUserResponseBody().from_map(m['body'])
        return self
class CreateUserGroupRequest(TeaModel):
    """Request parameters for the CreateUserGroup API."""

    _FIELD_MAP = (
        ('corp_id', 'CorpId'),
        ('isv_sub_id', 'IsvSubId'),
        ('parent_user_group_id', 'ParentUserGroupId'),
        ('user_group_name', 'UserGroupName'),
    )

    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
        parent_user_group_id: int = None,
        user_group_name: str = None,
    ):
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id
        self.parent_user_group_id = parent_user_group_id
        self.user_group_name = user_group_name

    def validate(self):
        """Nothing to validate: all fields are optional scalars."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateUserGroupResponseBodyData(TeaModel):
    """Payload of the CreateUserGroup response: the created group's identifiers."""

    _FIELD_MAP = (
        ('isv_sub_id', 'IsvSubId'),
        ('user_group_id', 'UserGroupId'),
        ('user_group_name', 'UserGroupName'),
    )

    def __init__(
        self,
        isv_sub_id: str = None,
        user_group_id: int = None,
        user_group_name: str = None,
    ):
        self.isv_sub_id = isv_sub_id
        self.user_group_id = user_group_id
        self.user_group_name = user_group_name

    def validate(self):
        """Nothing to validate: all fields are optional scalars."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateUserGroupResponseBody(TeaModel):
    """Body of the CreateUserGroup response: status fields plus nested data."""

    def __init__(
        self,
        code: str = None,
        data: CreateUserGroupResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Recursively validate the nested data model when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize non-None fields, delegating 'Data' to the nested model."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields (rebuilding the nested model) from a dict; returns self."""
        m = m or dict()
        for attr, key in (
            ('code', 'Code'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('Data') is not None:
            self.data = CreateUserGroupResponseBodyData().from_map(m['Data'])
        return self
class CreateUserGroupResponse(TeaModel):
    """HTTP-level wrapper for CreateUserGroup: headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateUserGroupResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three wrapper fields are required; the body validates recursively."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, delegating body serialization to the model."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Rebuild the wrapper (and nested body model) from a dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateUserGroupResponseBody().from_map(m['body'])
        return self
class CreateVideoComposeTaskRequest(TeaModel):
    """Request parameters for the CreateVideoComposeTask API."""

    _FIELD_MAP = (
        ('audio_file_name', 'AudioFileName'),
        ('bucket_name', 'BucketName'),
        ('corp_id', 'CorpId'),
        ('domain_name', 'DomainName'),
        ('image_file_names', 'ImageFileNames'),
        ('image_parameters', 'ImageParameters'),
        ('video_format', 'VideoFormat'),
        ('video_frame_rate', 'VideoFrameRate'),
    )

    def __init__(
        self,
        audio_file_name: str = None,
        bucket_name: str = None,
        corp_id: str = None,
        domain_name: str = None,
        image_file_names: str = None,
        image_parameters: str = None,
        video_format: str = None,
        video_frame_rate: int = None,
    ):
        self.audio_file_name = audio_file_name
        self.bucket_name = bucket_name
        self.corp_id = corp_id
        self.domain_name = domain_name
        self.image_file_names = image_file_names
        self.image_parameters = image_parameters
        self.video_format = video_format
        self.video_frame_rate = video_frame_rate

    def validate(self):
        """Nothing to validate: all fields are optional scalars."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateVideoComposeTaskResponseBody(TeaModel):
    """Body of the CreateVideoComposeTask response: status plus output location."""

    _FIELD_MAP = (
        ('bucket_name', 'BucketName'),
        ('code', 'Code'),
        ('domain_name', 'DomainName'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        bucket_name: str = None,
        code: str = None,
        domain_name: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.bucket_name = bucket_name
        self.code = code
        self.domain_name = domain_name
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Nothing to validate: all fields are optional scalars."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateVideoComposeTaskResponse(TeaModel):
    """HTTP-level wrapper for CreateVideoComposeTask: headers, status code, body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateVideoComposeTaskResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three wrapper fields are required; the body validates recursively."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, delegating body serialization to the model."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Rebuild the wrapper (and nested body model) from a dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateVideoComposeTaskResponseBody().from_map(m['body'])
        return self
class CreateVideoSummaryTaskRequest(TeaModel):
    """Request parameters for the CreateVideoSummaryTask API."""

    _FIELD_MAP = (
        ('corp_id', 'CorpId'),
        ('device_id', 'DeviceId'),
        ('end_time_stamp', 'EndTimeStamp'),
        ('live_video_summary', 'LiveVideoSummary'),
        ('option_list', 'OptionList'),
        ('start_time_stamp', 'StartTimeStamp'),
    )

    def __init__(
        self,
        corp_id: str = None,
        device_id: str = None,
        end_time_stamp: int = None,
        live_video_summary: str = None,
        option_list: str = None,
        start_time_stamp: int = None,
    ):
        self.corp_id = corp_id
        self.device_id = device_id
        self.end_time_stamp = end_time_stamp
        self.live_video_summary = live_video_summary
        self.option_list = option_list
        self.start_time_stamp = start_time_stamp

    def validate(self):
        """Nothing to validate: all fields are optional scalars."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateVideoSummaryTaskResponseBody(TeaModel):
    """Body of the CreateVideoSummaryTask response; data is a plain string."""

    _FIELD_MAP = (
        ('code', 'Code'),
        ('data', 'Data'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Nothing to validate: all fields are optional scalars."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateVideoSummaryTaskResponse(TeaModel):
    """HTTP-level wrapper for CreateVideoSummaryTask: headers, status code, body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateVideoSummaryTaskResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three wrapper fields are required; the body validates recursively."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, delegating body serialization to the model."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Rebuild the wrapper (and nested body model) from a dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateVideoSummaryTaskResponseBody().from_map(m['body'])
        return self
class CreateWatchPolicyRequest(TeaModel):
    """Request parameters for the CreateWatchPolicy API."""

    _FIELD_MAP = (
        ('item_match_type', 'ItemMatchType'),
        ('similarity_threshold', 'SimilarityThreshold'),
        ('target_type', 'TargetType'),
        ('watch_mode', 'WatchMode'),
        ('watch_policy_name', 'WatchPolicyName'),
    )

    def __init__(
        self,
        item_match_type: str = None,
        similarity_threshold: float = None,
        target_type: str = None,
        watch_mode: str = None,
        watch_policy_name: str = None,
    ):
        self.item_match_type = item_match_type
        self.similarity_threshold = similarity_threshold
        self.target_type = target_type
        self.watch_mode = watch_mode
        self.watch_policy_name = watch_policy_name

    def validate(self):
        """Nothing to validate: all fields are optional scalars."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateWatchPolicyResponseBodyData(TeaModel):
    """Payload of the CreateWatchPolicy response: the new policy's id."""

    _FIELD_MAP = (('watch_policy_id', 'WatchPolicyId'),)

    def __init__(
        self,
        watch_policy_id: str = None,
    ):
        self.watch_policy_id = watch_policy_id

    def validate(self):
        """Nothing to validate: single optional scalar."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateWatchPolicyResponseBody(TeaModel):
    """Body of the CreateWatchPolicy response: status fields plus nested data."""

    def __init__(
        self,
        code: str = None,
        data: CreateWatchPolicyResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id
        self.success = success

    def validate(self):
        """Recursively validate the nested data model when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize non-None fields, delegating 'Data' to the nested model."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields (rebuilding the nested model) from a dict; returns self."""
        m = m or dict()
        for attr, key in (
            ('code', 'Code'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('Data') is not None:
            self.data = CreateWatchPolicyResponseBodyData().from_map(m['Data'])
        return self
class CreateWatchPolicyResponse(TeaModel):
    """HTTP-level wrapper for CreateWatchPolicy: headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateWatchPolicyResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three wrapper fields are required; the body validates recursively."""
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, delegating body serialization to the model."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Rebuild the wrapper (and nested body model) from a dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateWatchPolicyResponseBody().from_map(m['body'])
        return self
class CreateWatchTaskRequest(TeaModel):
    """Request parameters for the CreateWatchTask API."""

    _FIELD_MAP = (
        ('corp_id', 'CorpId'),
        ('description', 'Description'),
        ('device_list', 'DeviceList'),
        ('message_receiver', 'MessageReceiver'),
        ('schedule_cycle_dates', 'ScheduleCycleDates'),
        ('schedule_times', 'ScheduleTimes'),
        ('schedule_type', 'ScheduleType'),
        ('task_name', 'TaskName'),
        ('watch_policy_ids', 'WatchPolicyIds'),
    )

    def __init__(
        self,
        corp_id: str = None,
        description: str = None,
        device_list: str = None,
        message_receiver: str = None,
        schedule_cycle_dates: str = None,
        schedule_times: str = None,
        schedule_type: str = None,
        task_name: str = None,
        watch_policy_ids: str = None,
    ):
        self.corp_id = corp_id
        self.description = description
        self.device_list = device_list
        self.message_receiver = message_receiver
        self.schedule_cycle_dates = schedule_cycle_dates
        self.schedule_times = schedule_times
        self.schedule_type = schedule_type
        self.task_name = task_name
        self.watch_policy_ids = watch_policy_ids

    def validate(self):
        """Nothing to validate: all fields are optional scalars."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateWatchTaskResponseBodyData(TeaModel):
    """Payload of the CreateWatchTask response: the new task's id."""

    _FIELD_MAP = (('watch_task_id', 'WatchTaskId'),)

    def __init__(
        self,
        watch_task_id: str = None,
    ):
        self.watch_task_id = watch_task_id

    def validate(self):
        """Nothing to validate: single optional scalar."""
        pass

    def to_map(self):
        """Serialize every non-None field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateWatchTaskResponseBody(TeaModel):
    """Body of the CreateWatchTask response: status fields plus nested data."""

    def __init__(
        self,
        code: str = None,
        data: CreateWatchTaskResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.success = success

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        # Remaining scalar fields share one copy loop.
        for value, key in (
            (self.message, 'Message'),
            (self.request_id, 'RequestId'),
            (self.success, 'Success'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            # Nested model is re-hydrated from its own map.
            self.data = CreateWatchTaskResponseBodyData().from_map(m['Data'])
        for attr, key in (('message', 'Message'), ('request_id', 'RequestId'), ('success', 'Success')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class CreateWatchTaskResponse(TeaModel):
    """HTTP-level wrapper: response headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: CreateWatchTaskResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a completed call.
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.headers, 'headers'), (self.status_code, 'statusCode')):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = CreateWatchTaskResponseBody().from_map(m['body'])
        return self
class DeleteAIInstanceRequest(TeaModel):
    """Request for DeleteAIInstance."""

    def __init__(
        self,
        instance_ids: List[str] = None,
    ):
        # IDs of the compute instances to delete.
        self.instance_ids = instance_ids

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        # Serialize only fields that are actually set.
        return {} if self.instance_ids is None else {'InstanceIds': self.instance_ids}

    def from_map(self, m: dict = None):
        m = m or dict()
        value = m.get('InstanceIds')
        if value is not None:
            self.instance_ids = value
        return self
class DeleteAIInstanceShrinkRequest(TeaModel):
    """Shrink variant of DeleteAIInstanceRequest (list serialized as a string)."""

    def __init__(
        self,
        instance_ids_shrink: str = None,
    ):
        # IDs of the compute instances to delete (JSON-shrunk string form).
        self.instance_ids_shrink = instance_ids_shrink

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        # Wire key is the same as the non-shrink request.
        return {} if self.instance_ids_shrink is None else {'InstanceIds': self.instance_ids_shrink}

    def from_map(self, m: dict = None):
        m = m or dict()
        value = m.get('InstanceIds')
        if value is not None:
            self.instance_ids_shrink = value
        return self
class DeleteAIInstanceResponseBody(TeaModel):
    """Body of the DeleteAIInstance response."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        # Response status code.
        self.code = code
        # Error message, if any.
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.code, 'Code'),
            (self.message, 'Message'),
            (self.request_id, 'RequestId'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('code', 'Code'), ('message', 'Message'), ('request_id', 'RequestId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteAIInstanceResponse(TeaModel):
    """HTTP-level wrapper: response headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteAIInstanceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.headers, 'headers'), (self.status_code, 'statusCode')):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteAIInstanceResponseBody().from_map(m['body'])
        return self
class DeleteAiotDeviceRequest(TeaModel):
    """Request for DeleteAiotDevice: identifies the device to remove."""

    def __init__(
        self,
        id: str = None,
    ):
        self.id = id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {} if self.id is None else {'Id': self.id}

    def from_map(self, m: dict = None):
        m = m or dict()
        value = m.get('Id')
        if value is not None:
            self.id = value
        return self
class DeleteAiotDeviceResponseBody(TeaModel):
    """Body of the DeleteAiotDevice response."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.code, 'Code'),
            (self.message, 'Message'),
            (self.request_id, 'RequestId'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('code', 'Code'), ('message', 'Message'), ('request_id', 'RequestId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteAiotDeviceResponse(TeaModel):
    """HTTP-level wrapper: response headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteAiotDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.headers, 'headers'), (self.status_code, 'statusCode')):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteAiotDeviceResponseBody().from_map(m['body'])
        return self
class DeleteAiotPersonTableRequest(TeaModel):
    """Request for DeleteAiotPersonTable: device id plus the table to drop."""

    def __init__(
        self,
        id: str = None,
        person_table_id: str = None,
    ):
        self.id = id
        self.person_table_id = person_table_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.id, 'Id'), (self.person_table_id, 'PersonTableId')):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('id', 'Id'), ('person_table_id', 'PersonTableId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteAiotPersonTableResponseBody(TeaModel):
    """Body of the DeleteAiotPersonTable response."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.code, 'Code'),
            (self.message, 'Message'),
            (self.request_id, 'RequestId'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('code', 'Code'), ('message', 'Message'), ('request_id', 'RequestId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteAiotPersonTableResponse(TeaModel):
    """HTTP-level wrapper: response headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteAiotPersonTableResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.headers, 'headers'), (self.status_code, 'statusCode')):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteAiotPersonTableResponseBody().from_map(m['body'])
        return self
class DeleteAiotPersonTableItemRequest(TeaModel):
    """Request for DeleteAiotPersonTableItem: device, table and item ids."""

    def __init__(
        self,
        id: str = None,
        person_table_id: str = None,
        person_table_item_id: str = None,
    ):
        self.id = id
        self.person_table_id = person_table_id
        self.person_table_item_id = person_table_item_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.id, 'Id'),
            (self.person_table_id, 'PersonTableId'),
            (self.person_table_item_id, 'PersonTableItemId'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('id', 'Id'),
            ('person_table_id', 'PersonTableId'),
            ('person_table_item_id', 'PersonTableItemId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteAiotPersonTableItemResponseBody(TeaModel):
    """Body of the DeleteAiotPersonTableItem response."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.code, 'Code'),
            (self.message, 'Message'),
            (self.request_id, 'RequestId'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('code', 'Code'), ('message', 'Message'), ('request_id', 'RequestId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteAiotPersonTableItemResponse(TeaModel):
    """HTTP-level wrapper: response headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteAiotPersonTableItemResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.headers, 'headers'), (self.status_code, 'statusCode')):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteAiotPersonTableItemResponseBody().from_map(m['body'])
        return self
class DeleteAiotVehicleTableItemRequest(TeaModel):
    """Request for DeleteAiotVehicleTableItem: device, table and item ids."""

    def __init__(
        self,
        id: str = None,
        vehicle_table_id: str = None,
        vehicle_table_item_id: str = None,
    ):
        self.id = id
        self.vehicle_table_id = vehicle_table_id
        self.vehicle_table_item_id = vehicle_table_item_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.id, 'Id'),
            (self.vehicle_table_id, 'VehicleTableId'),
            (self.vehicle_table_item_id, 'VehicleTableItemId'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('id', 'Id'),
            ('vehicle_table_id', 'VehicleTableId'),
            ('vehicle_table_item_id', 'VehicleTableItemId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteAiotVehicleTableItemResponseBody(TeaModel):
    """Body of the DeleteAiotVehicleTableItem response."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.code, 'Code'),
            (self.message, 'Message'),
            (self.request_id, 'RequestId'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('code', 'Code'), ('message', 'Message'), ('request_id', 'RequestId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteAiotVehicleTableItemResponse(TeaModel):
    """HTTP-level wrapper: response headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteAiotVehicleTableItemResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.headers, 'headers'), (self.status_code, 'statusCode')):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteAiotVehicleTableItemResponseBody().from_map(m['body'])
        return self
class DeleteChannelRequest(TeaModel):
    """Request for DeleteChannel: device codes of the channels to delete."""

    def __init__(
        self,
        device_codes: str = None,
    ):
        self.device_codes = device_codes

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {} if self.device_codes is None else {'DeviceCodes': self.device_codes}

    def from_map(self, m: dict = None):
        m = m or dict()
        value = m.get('DeviceCodes')
        if value is not None:
            self.device_codes = value
        return self
class DeleteChannelResponseBody(TeaModel):
    """Body of the DeleteChannel response."""

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.code, 'Code'),
            (self.data, 'Data'),
            (self.message, 'Message'),
            (self.request_id, 'RequestId'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('code', 'Code'),
            ('data', 'Data'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteChannelResponse(TeaModel):
    """HTTP-level wrapper: response headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteChannelResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.headers, 'headers'), (self.status_code, 'statusCode')):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteChannelResponseBody().from_map(m['body'])
        return self
class DeleteCorpGroupRequest(TeaModel):
    """Request for DeleteCorpGroup: corp id plus the group to delete."""

    def __init__(
        self,
        corp_id: str = None,
        group_id: str = None,
    ):
        self.corp_id = corp_id
        self.group_id = group_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.corp_id, 'CorpId'), (self.group_id, 'GroupId')):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('corp_id', 'CorpId'), ('group_id', 'GroupId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteCorpGroupResponseBody(TeaModel):
    """Body of the DeleteCorpGroup response."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.message = message
        self.request_id = request_id
        self.success = success

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.code, 'Code'),
            (self.message, 'Message'),
            (self.request_id, 'RequestId'),
            (self.success, 'Success'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('code', 'Code'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteCorpGroupResponse(TeaModel):
    """HTTP-level wrapper: response headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteCorpGroupResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.headers, 'headers'), (self.status_code, 'statusCode')):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteCorpGroupResponseBody().from_map(m['body'])
        return self
class DeleteDataSourceRequest(TeaModel):
    """Request for DeleteDataSource: corp id plus the data source to delete."""

    def __init__(
        self,
        corp_id: str = None,
        data_source_id: str = None,
    ):
        self.corp_id = corp_id
        self.data_source_id = data_source_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.corp_id, 'CorpId'), (self.data_source_id, 'DataSourceId')):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('corp_id', 'CorpId'), ('data_source_id', 'DataSourceId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteDataSourceResponseBody(TeaModel):
    """Body of the DeleteDataSource response (no RequestId in this schema)."""

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.code, 'Code'),
            (self.data, 'Data'),
            (self.message, 'Message'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('code', 'Code'), ('data', 'Data'), ('message', 'Message')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteDataSourceResponse(TeaModel):
    """HTTP-level wrapper: response headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteDataSourceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.headers, 'headers'), (self.status_code, 'statusCode')):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteDataSourceResponseBody().from_map(m['body'])
        return self
class DeleteDataSourcesRequest(TeaModel):
    """Request for DeleteDataSources: ids of the data sources to delete."""

    def __init__(
        self,
        data_source_id_list: str = None,
    ):
        self.data_source_id_list = data_source_id_list

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {} if self.data_source_id_list is None else {'DataSourceIdList': self.data_source_id_list}

    def from_map(self, m: dict = None):
        m = m or dict()
        value = m.get('DataSourceIdList')
        if value is not None:
            self.data_source_id_list = value
        return self
class DeleteDataSourcesResponseBodyData(TeaModel):
    """Per-data-source result entry in the DeleteDataSources response."""

    def __init__(
        self,
        code: str = None,
        data_source_id: str = None,
        message: str = None,
    ):
        self.code = code
        self.data_source_id = data_source_id
        self.message = message

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.code, 'Code'),
            (self.data_source_id, 'DataSourceId'),
            (self.message, 'Message'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('code', 'Code'), ('data_source_id', 'DataSourceId'), ('message', 'Message')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteDataSourcesResponseBody(TeaModel):
    """Body of the DeleteDataSources response: status plus per-source results."""

    def __init__(
        self,
        code: str = None,
        data: List[DeleteDataSourcesResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        for entry in (self.data or []):
            if entry:
                entry.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        # 'Data' is always emitted, even when empty.
        result['Data'] = [entry.to_map() if entry else None for entry in (self.data or [])]
        for value, key in ((self.message, 'Message'), (self.request_id, 'RequestId')):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        # Always reset to a fresh list; rebuild nested models from their maps.
        self.data = [
            DeleteDataSourcesResponseBodyData().from_map(entry)
            for entry in (m.get('Data') or [])
        ]
        for attr, key in (('message', 'Message'), ('request_id', 'RequestId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteDataSourcesResponse(TeaModel):
    """HTTP-level wrapper: response headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteDataSourcesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.headers, 'headers'), (self.status_code, 'statusCode')):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteDataSourcesResponseBody().from_map(m['body'])
        return self
class DeleteDeviceRequest(TeaModel):
    """Request for DeleteDevice: corp id plus the GB id of the device."""

    def __init__(
        self,
        corp_id: str = None,
        gb_id: str = None,
    ):
        self.corp_id = corp_id
        self.gb_id = gb_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.corp_id, 'CorpId'), (self.gb_id, 'GbId')):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('corp_id', 'CorpId'), ('gb_id', 'GbId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteDeviceResponseBody(TeaModel):
    """Body of the DeleteDevice response."""

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.code, 'Code'),
            (self.data, 'Data'),
            (self.message, 'Message'),
            (self.request_id, 'RequestId'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('code', 'Code'),
            ('data', 'Data'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteDeviceResponse(TeaModel):
    """HTTP-level wrapper: response headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        for name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, name), name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.headers, 'headers'), (self.status_code, 'statusCode')):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteDeviceResponseBody().from_map(m['body'])
        return self
class DeleteDeviceForInstanceRequestDevices(TeaModel):
    """One device entry of a DeleteDeviceForInstance request."""

    def __init__(
        self,
        device_id: str = None,
        region_id: str = None,
    ):
        self.device_id = device_id
        self.region_id = region_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in ((self.device_id, 'DeviceId'), (self.region_id, 'RegionId')):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('device_id', 'DeviceId'), ('region_id', 'RegionId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteDeviceForInstanceRequest(TeaModel):
    """Request for DeleteDeviceForInstance: instance/algorithm context plus devices."""

    def __init__(
        self,
        algorithm_id: str = None,
        delete_instance_flag: bool = None,
        device_count: str = None,
        devices: List[DeleteDeviceForInstanceRequestDevices] = None,
        instance_id: str = None,
        project_id: str = None,
    ):
        self.algorithm_id = algorithm_id
        self.delete_instance_flag = delete_instance_flag
        self.device_count = device_count
        self.devices = devices
        self.instance_id = instance_id
        self.project_id = project_id

    def validate(self):
        for entry in (self.devices or []):
            if entry:
                entry.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.algorithm_id, 'AlgorithmId'),
            (self.delete_instance_flag, 'DeleteInstanceFlag'),
            (self.device_count, 'DeviceCount'),
        ):
            if value is not None:
                result[key] = value
        # 'Devices' is always emitted, even when empty.
        result['Devices'] = [entry.to_map() if entry else None for entry in (self.devices or [])]
        for value, key in ((self.instance_id, 'InstanceId'), (self.project_id, 'ProjectId')):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('algorithm_id', 'AlgorithmId'),
            ('delete_instance_flag', 'DeleteInstanceFlag'),
            ('device_count', 'DeviceCount'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        # Always reset to a fresh list; rebuild nested models from their maps.
        self.devices = [
            DeleteDeviceForInstanceRequestDevices().from_map(entry)
            for entry in (m.get('Devices') or [])
        ]
        for attr, key in (('instance_id', 'InstanceId'), ('project_id', 'ProjectId')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteDeviceForInstanceShrinkRequest(TeaModel):
    """Shrink variant of DeleteDeviceForInstanceRequest (devices as a string)."""

    def __init__(
        self,
        algorithm_id: str = None,
        delete_instance_flag: bool = None,
        device_count: str = None,
        devices_shrink: str = None,
        instance_id: str = None,
        project_id: str = None,
    ):
        self.algorithm_id = algorithm_id
        self.delete_instance_flag = delete_instance_flag
        self.device_count = device_count
        # JSON-shrunk string form of the device list; wire key stays 'Devices'.
        self.devices_shrink = devices_shrink
        self.instance_id = instance_id
        self.project_id = project_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for value, key in (
            (self.algorithm_id, 'AlgorithmId'),
            (self.delete_instance_flag, 'DeleteInstanceFlag'),
            (self.device_count, 'DeviceCount'),
            (self.devices_shrink, 'Devices'),
            (self.instance_id, 'InstanceId'),
            (self.project_id, 'ProjectId'),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('algorithm_id', 'AlgorithmId'),
            ('delete_instance_flag', 'DeleteInstanceFlag'),
            ('device_count', 'DeviceCount'),
            ('devices_shrink', 'Devices'),
            ('instance_id', 'InstanceId'),
            ('project_id', 'ProjectId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DeleteDeviceForInstanceResponseBody(TeaModel):
    """Body of the DeleteDeviceForInstance response."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
        ('success', 'Success'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.message = message
        self.request_id = request_id
        self.success = success

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteDeviceForInstanceResponse(TeaModel):
    """DeleteDeviceForInstance response envelope: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteDeviceForInstanceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are required; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteDeviceForInstanceResponseBody().from_map(m['body'])
        return self
class DeleteDevicesRequest(TeaModel):
    """Request model for the DeleteDevices API."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (('device_id_list', 'DeviceIdList'),)

    def __init__(
        self,
        device_id_list: str = None,
    ):
        self.device_id_list = device_id_list

    def validate(self):
        # Single scalar field; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteDevicesResponseBodyData(TeaModel):
    """Per-device result entry inside the DeleteDevices response body."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('code', 'Code'),
        ('description', 'Description'),
        ('device_id', 'DeviceId'),
    )

    def __init__(
        self,
        code: str = None,
        description: str = None,
        device_id: str = None,
    ):
        self.code = code
        self.description = description
        self.device_id = device_id

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteDevicesResponseBody(TeaModel):
    """Body of the DeleteDevices response; `data` is a list of per-device results."""

    def __init__(
        self,
        code: str = None,
        data: List[DeleteDevicesResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Only the nested data entries carry validation logic.
        for entry in self.data or ():
            if entry:
                entry.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        # 'Data' is always emitted, even when self.data is None.
        result['Data'] = []
        if self.data is not None:
            result['Data'] = [entry.to_map() if entry else None for entry in self.data]
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        # Reset to a fresh list before repopulating from the map.
        self.data = []
        if m.get('Data') is not None:
            self.data = [
                DeleteDevicesResponseBodyData().from_map(item)
                for item in m.get('Data')
            ]
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class DeleteDevicesResponse(TeaModel):
    """DeleteDevices response envelope: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteDevicesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are required; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteDevicesResponseBody().from_map(m['body'])
        return self
class DeleteDoubleVerificationGroupRequest(TeaModel):
    """Request model for the DeleteDoubleVerificationGroup API."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('double_verification_group_id', 'DoubleVerificationGroupId'),
        ('id', 'Id'),
    )

    def __init__(
        self,
        double_verification_group_id: str = None,
        id: str = None,
    ):
        self.double_verification_group_id = double_verification_group_id
        self.id = id

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteDoubleVerificationGroupResponseBody(TeaModel):
    """Body of the DeleteDoubleVerificationGroup response."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteDoubleVerificationGroupResponse(TeaModel):
    """DeleteDoubleVerificationGroup response envelope: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteDoubleVerificationGroupResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are required; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteDoubleVerificationGroupResponseBody().from_map(m['body'])
        return self
class DeleteIPCDeviceRequest(TeaModel):
    """Request model for the DeleteIPCDevice API."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (('device_codes', 'DeviceCodes'),)

    def __init__(
        self,
        device_codes: str = None,
    ):
        self.device_codes = device_codes

    def validate(self):
        # Single scalar field; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteIPCDeviceResponseBody(TeaModel):
    """Body of the DeleteIPCDevice response."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('code', 'Code'),
        ('data', 'Data'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteIPCDeviceResponse(TeaModel):
    """DeleteIPCDevice response envelope: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteIPCDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are required; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteIPCDeviceResponseBody().from_map(m['body'])
        return self
class DeleteModelServiceRequest(TeaModel):
    """Request model for the DeleteModelService API."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (('model_service_id', 'ModelServiceId'),)

    def __init__(
        self,
        model_service_id: str = None,
    ):
        self.model_service_id = model_service_id

    def validate(self):
        # Single scalar field; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteModelServiceResponseBodyData(TeaModel):
    """Data payload inside the DeleteModelService response body."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('model_service_instance_id', 'ModelServiceInstanceId'),
        ('model_service_instance_name', 'ModelServiceInstanceName'),
        ('model_service_status', 'ModelServiceStatus'),
    )

    def __init__(
        self,
        model_service_instance_id: str = None,
        model_service_instance_name: int = None,
        model_service_status: str = None,
    ):
        self.model_service_instance_id = model_service_instance_id
        # NOTE(review): annotated `int` although it is a "name"; annotation
        # mirrors the generated API spec — confirm before relying on the type.
        self.model_service_instance_name = model_service_instance_name
        self.model_service_status = model_service_status

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteModelServiceResponseBody(TeaModel):
    """Body of the DeleteModelService response; `data` is a nested sub-model."""

    def __init__(
        self,
        code: str = None,
        data: DeleteModelServiceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        # Only the nested data model carries validation logic.
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = DeleteModelServiceResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class DeleteModelServiceResponse(TeaModel):
    """DeleteModelService response envelope: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteModelServiceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are required; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteModelServiceResponseBody().from_map(m['body'])
        return self
class DeleteNVRDeviceRequest(TeaModel):
    """Request model for the DeleteNVRDevice API."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (('device_codes', 'DeviceCodes'),)

    def __init__(
        self,
        device_codes: str = None,
    ):
        self.device_codes = device_codes

    def validate(self):
        # Single scalar field; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteNVRDeviceResponseBody(TeaModel):
    """Body of the DeleteNVRDevice response."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('code', 'Code'),
        ('data', 'Data'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteNVRDeviceResponse(TeaModel):
    """DeleteNVRDevice response envelope: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteNVRDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are required; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteNVRDeviceResponseBody().from_map(m['body'])
        return self
class DeleteProfileRequest(TeaModel):
    """Request model for the DeleteProfile API."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('corp_id', 'CorpId'),
        ('isv_sub_id', 'IsvSubId'),
        ('profile_id', 'ProfileId'),
    )

    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
        profile_id: int = None,
    ):
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id
        self.profile_id = profile_id

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteProfileResponseBody(TeaModel):
    """Body of the DeleteProfile response."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('code', 'Code'),
        ('data', 'Data'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        data: bool = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteProfileResponse(TeaModel):
    """DeleteProfile response envelope: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteProfileResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are required; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteProfileResponseBody().from_map(m['body'])
        return self
class DeleteProfileCatalogRequest(TeaModel):
    """Request model for the DeleteProfileCatalog API."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('catalog_id', 'CatalogId'),
        ('corp_id', 'CorpId'),
        ('isv_sub_id', 'IsvSubId'),
    )

    def __init__(
        self,
        catalog_id: str = None,
        corp_id: str = None,
        isv_sub_id: str = None,
    ):
        self.catalog_id = catalog_id
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteProfileCatalogResponseBody(TeaModel):
    """Body of the DeleteProfileCatalog response."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('code', 'Code'),
        ('data', 'Data'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        data: bool = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteProfileCatalogResponse(TeaModel):
    """DeleteProfileCatalog response envelope: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteProfileCatalogResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are required; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteProfileCatalogResponseBody().from_map(m['body'])
        return self
class DeleteProjectRequest(TeaModel):
    """Request model for the DeleteProject API."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (('project_ids', 'ProjectIds'),)

    def __init__(
        self,
        project_ids: str = None,
    ):
        # Project ids; multiple ids are separated by ",".
        self.project_ids = project_ids

    def validate(self):
        # Single scalar field; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteProjectResponseBody(TeaModel):
    """Body of the DeleteProject response."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
        ('success', 'Success'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.message = message
        self.request_id = request_id
        self.success = success

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteProjectResponse(TeaModel):
    """DeleteProject response envelope: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteProjectResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are required; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteProjectResponseBody().from_map(m['body'])
        return self
class DeleteRecordsRequest(TeaModel):
    """Request model for the DeleteRecords API."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('algorithm_type', 'AlgorithmType'),
        ('attribute_name', 'AttributeName'),
        ('corp_id', 'CorpId'),
        ('operator_type', 'OperatorType'),
        ('value', 'Value'),
    )

    def __init__(
        self,
        algorithm_type: str = None,
        attribute_name: str = None,
        corp_id: str = None,
        operator_type: str = None,
        value: str = None,
    ):
        self.algorithm_type = algorithm_type
        self.attribute_name = attribute_name
        self.corp_id = corp_id
        self.operator_type = operator_type
        self.value = value

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteRecordsResponseBody(TeaModel):
    """Body of the DeleteRecords response."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('code', 'Code'),
        ('data', 'Data'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteRecordsResponse(TeaModel):
    """DeleteRecords response envelope: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteRecordsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are required; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteRecordsResponseBody().from_map(m['body'])
        return self
class DeleteSearchTablesRequest(TeaModel):
    """Request model for the DeleteSearchTables API."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (('search_table_ids', 'SearchTableIds'),)

    def __init__(
        self,
        search_table_ids: str = None,
    ):
        self.search_table_ids = search_table_ids

    def validate(self):
        # Single scalar field; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteSearchTablesResponseBody(TeaModel):
    """Body of the DeleteSearchTables response."""

    # (attribute, wire key) pairs in serialization order.
    _FIELD_MAP = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
        ('success', 'Success'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request.
        self.request_id = request_id
        self.success = success

    def validate(self):
        # All fields are plain scalars; nothing to recurse into.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for attr, key in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, key in self._FIELD_MAP:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteSearchTablesResponse(TeaModel):
    """DeleteSearchTables response envelope: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteSearchTablesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are required; the body validates itself.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteSearchTablesResponseBody().from_map(m['body'])
        return self
class DeleteSubscribeRequest(TeaModel):
    """Request parameters for the DeleteSubscribe API."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('DeviceId', 'device_id'),
    )

    def __init__(
        self,
        device_id: str = None,
    ):
        # Identifier of the device whose subscription is to be deleted.
        self.device_id = device_id

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteSubscribeResponseBody(TeaModel):
    """Response body of the DeleteSubscribe API."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('Code', 'code'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        # Service result code.
        self.code = code
        # Human-readable result message.
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteSubscribeResponse(TeaModel):
    """Full API response for DeleteSubscribe: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteSubscribeResponseBody = None,
    ):
        # Raw HTTP response headers.
        self.headers = headers
        # HTTP status code.
        self.status_code = status_code
        # Parsed response body model.
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, nesting the body as a plain dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Deserialize the wrapper, parsing the body into its model class."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteSubscribeResponseBody().from_map(m['body'])
        return self
class DeleteUserRequest(TeaModel):
    """Request parameters for the DeleteUser API."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('CorpId', 'corp_id'),
        ('IsvSubId', 'isv_sub_id'),
        ('UserId', 'user_id'),
    )

    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
        user_id: int = None,
    ):
        # Corporation identifier.
        self.corp_id = corp_id
        # ISV sub identifier.
        self.isv_sub_id = isv_sub_id
        # Identifier of the user to delete.
        self.user_id = user_id

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteUserResponseBody(TeaModel):
    """Response body of the DeleteUser API."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('Code', 'code'),
        ('Data', 'data'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
    )

    def __init__(
        self,
        code: str = None,
        data: bool = None,
        message: str = None,
        request_id: str = None,
    ):
        # Service result code.
        self.code = code
        # Boolean payload returned by the service.
        self.data = data
        # Human-readable result message.
        self.message = message
        # Identifier of the request.
        self.request_id = request_id

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteUserResponse(TeaModel):
    """Full API response for DeleteUser: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteUserResponseBody = None,
    ):
        # Raw HTTP response headers.
        self.headers = headers
        # HTTP status code.
        self.status_code = status_code
        # Parsed response body model.
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, nesting the body as a plain dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Deserialize the wrapper, parsing the body into its model class."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteUserResponseBody().from_map(m['body'])
        return self
class DeleteUserGroupRequest(TeaModel):
    """Request parameters for the DeleteUserGroup API."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('CorpId', 'corp_id'),
        ('IsvSubId', 'isv_sub_id'),
        ('UserGroupId', 'user_group_id'),
    )

    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
        user_group_id: str = None,
    ):
        # Corporation identifier.
        self.corp_id = corp_id
        # ISV sub identifier.
        self.isv_sub_id = isv_sub_id
        # Identifier of the user group to delete.
        self.user_group_id = user_group_id

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteUserGroupResponseBody(TeaModel):
    """Response body of the DeleteUserGroup API."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('Code', 'code'),
        ('Data', 'data'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
    )

    def __init__(
        self,
        code: str = None,
        data: bool = None,
        message: str = None,
        request_id: str = None,
    ):
        # Service result code.
        self.code = code
        # Boolean payload returned by the service.
        self.data = data
        # Human-readable result message.
        self.message = message
        # Identifier of the request.
        self.request_id = request_id

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteUserGroupResponse(TeaModel):
    """Full API response for DeleteUserGroup: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteUserGroupResponseBody = None,
    ):
        # Raw HTTP response headers.
        self.headers = headers
        # HTTP status code.
        self.status_code = status_code
        # Parsed response body model.
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, nesting the body as a plain dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Deserialize the wrapper, parsing the body into its model class."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteUserGroupResponseBody().from_map(m['body'])
        return self
class DeleteVideoSummaryTaskRequest(TeaModel):
    """Request parameters for the DeleteVideoSummaryTask API."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('CorpId', 'corp_id'),
        ('TaskId', 'task_id'),
    )

    def __init__(
        self,
        corp_id: str = None,
        task_id: str = None,
    ):
        # Corporation identifier.
        self.corp_id = corp_id
        # Identifier of the video-summary task to delete.
        self.task_id = task_id

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteVideoSummaryTaskResponseBody(TeaModel):
    """Response body of the DeleteVideoSummaryTask API."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('Code', 'code'),
        ('Data', 'data'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
    )

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        # Service result code.
        self.code = code
        # String payload returned by the service.
        self.data = data
        # Human-readable result message.
        self.message = message
        # Identifier of the request.
        self.request_id = request_id

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteVideoSummaryTaskResponse(TeaModel):
    """Full API response for DeleteVideoSummaryTask: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteVideoSummaryTaskResponseBody = None,
    ):
        # Raw HTTP response headers.
        self.headers = headers
        # HTTP status code.
        self.status_code = status_code
        # Parsed response body model.
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, nesting the body as a plain dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Deserialize the wrapper, parsing the body into its model class."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteVideoSummaryTaskResponseBody().from_map(m['body'])
        return self
class DeleteWatchPoliciesRequest(TeaModel):
    """Request parameters for the DeleteWatchPolicies API."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('WatchPolicyIds', 'watch_policy_ids'),
    )

    def __init__(
        self,
        watch_policy_ids: str = None,
    ):
        # Identifiers of the watch policies to delete.
        self.watch_policy_ids = watch_policy_ids

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteWatchPoliciesResponseBody(TeaModel):
    """Response body of the DeleteWatchPolicies API."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('Code', 'code'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
        ('Success', 'success'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        # Service result code.
        self.code = code
        # Human-readable result message.
        self.message = message
        # Id of the request
        self.request_id = request_id
        # Whether the call succeeded.
        self.success = success

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteWatchPoliciesResponse(TeaModel):
    """Full API response for DeleteWatchPolicies: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteWatchPoliciesResponseBody = None,
    ):
        # Raw HTTP response headers.
        self.headers = headers
        # HTTP status code.
        self.status_code = status_code
        # Parsed response body model.
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, nesting the body as a plain dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Deserialize the wrapper, parsing the body into its model class."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteWatchPoliciesResponseBody().from_map(m['body'])
        return self
class DeleteWatchTasksRequest(TeaModel):
    """Request parameters for the DeleteWatchTasks API."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('WatchTaskIds', 'watch_task_ids'),
    )

    def __init__(
        self,
        watch_task_ids: str = None,
    ):
        # Identifiers of the watch tasks to delete.
        self.watch_task_ids = watch_task_ids

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteWatchTasksResponseBody(TeaModel):
    """Response body of the DeleteWatchTasks API."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('Code', 'code'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
        ('Success', 'success'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        # Service result code.
        self.code = code
        # Human-readable result message.
        self.message = message
        # Id of the request
        self.request_id = request_id
        # Whether the call succeeded.
        self.success = success

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DeleteWatchTasksResponse(TeaModel):
    """Full API response for DeleteWatchTasks: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DeleteWatchTasksResponseBody = None,
    ):
        # Raw HTTP response headers.
        self.headers = headers
        # HTTP status code.
        self.status_code = status_code
        # Parsed response body model.
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, nesting the body as a plain dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Deserialize the wrapper, parsing the body into its model class."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DeleteWatchTasksResponseBody().from_map(m['body'])
        return self
class DescribeAIInstanceRequest(TeaModel):
    """Request parameters for the DescribeAIInstance API (paged instance query)."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('InstanceId', 'instance_id'),
        ('InstanceName', 'instance_name'),
        ('InstanceType', 'instance_type'),
        ('PageNumber', 'page_number'),
        ('PageSize', 'page_size'),
        ('ProjectId', 'project_id'),
    )

    def __init__(
        self,
        instance_id: str = None,
        instance_name: str = None,
        instance_type: str = None,
        page_number: int = None,
        page_size: int = None,
        project_id: str = None,
    ):
        # Instance id.
        self.instance_id = instance_id
        # Instance name.
        self.instance_name = instance_name
        # Instance type.
        self.instance_type = instance_type
        # Current page number.
        self.page_number = page_number
        # Number of entries per page.
        self.page_size = page_size
        # Project id.
        self.project_id = project_id

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeAIInstanceResponseBodyDataRecords(TeaModel):
    """One AI-instance record in the paged DescribeAIInstance result."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('AcuUsed', 'acu_used'),
        ('AlgorithmId', 'algorithm_id'),
        ('AlgorithmName', 'algorithm_name'),
        ('CameraNumber', 'camera_number'),
        ('ComputeType', 'compute_type'),
        ('CreateDateTime', 'create_date_time'),
        ('DataSource', 'data_source'),
        ('DataSourceTimes', 'data_source_times'),
        ('DataType', 'data_type'),
        ('InstanceId', 'instance_id'),
        ('InstanceName', 'instance_name'),
        ('InstanceType', 'instance_type'),
        ('ScheduleCycleDates', 'schedule_cycle_dates'),
        ('ScheduleTimes', 'schedule_times'),
        ('ScheduleType', 'schedule_type'),
        ('Spf', 'spf'),
        ('Status', 'status'),
        ('Storage', 'storage'),
    )

    def __init__(
        self,
        acu_used: int = None,
        algorithm_id: str = None,
        algorithm_name: str = None,
        camera_number: int = None,
        compute_type: str = None,
        create_date_time: str = None,
        data_source: str = None,
        data_source_times: str = None,
        data_type: str = None,
        instance_id: str = None,
        instance_name: str = None,
        instance_type: str = None,
        schedule_cycle_dates: str = None,
        schedule_times: str = None,
        schedule_type: str = None,
        spf: int = None,
        status: str = None,
        storage: float = None,
    ):
        # Number of ACUs used.
        self.acu_used = acu_used
        # Algorithm (operator) ID.
        self.algorithm_id = algorithm_id
        # Algorithm (operator) name.
        self.algorithm_name = algorithm_name
        # Number of cameras computed in the instance.
        # Only returned when DataSource is Camera.
        self.camera_number = camera_number
        # Compute type.
        self.compute_type = compute_type
        # Instance creation time.
        self.create_date_time = create_date_time
        # Data source.
        self.data_source = data_source
        # Time ranges of the data source.
        self.data_source_times = data_source_times
        # Data type.
        self.data_type = data_type
        # Instance ID.
        self.instance_id = instance_id
        # Instance name.
        self.instance_name = instance_name
        # Instance type.
        self.instance_type = instance_type
        # Meaning depends on the value of ScheduleType.
        self.schedule_cycle_dates = schedule_cycle_dates
        # Execution time ranges.
        self.schedule_times = schedule_times
        # Schedule type.
        self.schedule_type = schedule_type
        # Extract 1 frame every N seconds, range [0, 3600]; 0 means no frame extraction.
        self.spf = spf
        # Instance status.
        self.status = status
        # Estimated storage usage.
        self.storage = storage

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeAIInstanceResponseBodyData(TeaModel):
    """Paged result set of the DescribeAIInstance API."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[DescribeAIInstanceResponseBodyDataRecords] = None,
        total_count: int = None,
    ):
        # Current page number.
        self.page_number = page_number
        # Number of entries per page.
        self.page_size = page_size
        # Records on this page.
        self.records = records
        # Total number of records.
        self.total_count = total_count

    def validate(self):
        """Validate each record in the list, skipping falsy entries."""
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize the page, converting each record to a plain dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.page_number is not None:
            result['PageNumber'] = self.page_number
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        # 'Records' is always emitted, even when the list is unset.
        result['Records'] = []
        if self.records is not None:
            for record in self.records:
                result['Records'].append(record.to_map() if record else None)
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        return result

    def from_map(self, m: dict = None):
        """Deserialize the page, parsing each record into its model class."""
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        # The records list is always reset before repopulating.
        self.records = []
        raw_records = m.get('Records')
        if raw_records is not None:
            for item in raw_records:
                parsed = DescribeAIInstanceResponseBodyDataRecords().from_map(item)
                self.records.append(parsed)
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class DescribeAIInstanceResponseBody(TeaModel):
    """Response body of the DescribeAIInstance API."""

    def __init__(
        self,
        code: str = None,
        data: DescribeAIInstanceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        # Returned error code.
        self.code = code
        # Returned paged data.
        self.data = data
        # Error message.
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        """Recursively validate the nested data payload, if present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize the body, nesting the data payload as a plain dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        """Deserialize the body, parsing the data payload into its model class."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = DescribeAIInstanceResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class DescribeAIInstanceResponse(TeaModel):
    """Full API response for DescribeAIInstance: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeAIInstanceResponseBody = None,
    ):
        # Raw HTTP response headers.
        self.headers = headers
        # HTTP status code.
        self.status_code = status_code
        # Parsed response body model.
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the wrapper, nesting the body as a plain dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Deserialize the wrapper, parsing the body into its model class."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeAIInstanceResponseBody().from_map(m['body'])
        return self
class DescribeAiotDevicesRequest(TeaModel):
    """Request parameters for the DescribeAiotDevices API (paged device query)."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    _KEYS = (
        ('CorpIdList', 'corp_id_list'),
        ('IdList', 'id_list'),
        ('PageNum', 'page_num'),
        ('PageSize', 'page_size'),
    )

    def __init__(
        self,
        corp_id_list: str = None,
        id_list: str = None,
        page_num: int = None,
        page_size: int = None,
    ):
        # Corporation identifiers to filter by.
        self.corp_id_list = corp_id_list
        # Device identifiers to filter by.
        self.id_list = id_list
        # Current page number.
        self.page_num = page_num
        # Number of entries per page.
        self.page_size = page_size

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeAiotDevicesResponseBodyAiotDevicesAiotDeviceList(TeaModel):
    """One AIoT device entry in the DescribeAiotDevices result."""

    # (wire key, attribute name) pairs driving serialization in both directions.
    # Note the irregular wire keys (e.g. 'IPV6Addr' vs 'IPv4Gateway',
    # 'SerialNuber') are reproduced exactly as the service emits them.
    _KEYS = (
        ('CapDirection', 'cap_direction'),
        ('CorpId', 'corp_id'),
        ('DeviceId', 'device_id'),
        ('DeviceType', 'device_type'),
        ('FirmwareVersion', 'firmware_version'),
        ('IPAddr', 'ipaddr'),
        ('IPV6Addr', 'ipv6addr'),
        ('IPv4Gateway', 'ipv_4gateway'),
        ('IPv4Netmask', 'ipv_4netmask'),
        ('Id', 'id'),
        ('IsOnline', 'is_online'),
        ('Latitude', 'latitude'),
        ('Longitude', 'longitude'),
        ('MAC', 'mac'),
        ('Manufacturer', 'manufacturer'),
        ('Model', 'model'),
        ('MonitorAreaDesc', 'monitor_area_desc'),
        ('MonitorDirection', 'monitor_direction'),
        ('Name', 'name'),
        ('OrgCode', 'org_code'),
        ('OwnerApsID', 'owner_aps_id'),
        ('Password', 'password'),
        ('Place', 'place'),
        ('PlaceCode', 'place_code'),
        ('Port', 'port'),
        ('SerialNuber', 'serial_nuber'),
        ('UserId', 'user_id'),
    )

    def __init__(
        self,
        cap_direction: str = None,
        corp_id: str = None,
        device_id: str = None,
        device_type: str = None,
        firmware_version: str = None,
        ipaddr: str = None,
        ipv6addr: str = None,
        ipv_4gateway: str = None,
        ipv_4netmask: str = None,
        id: str = None,
        is_online: str = None,
        latitude: float = None,
        longitude: float = None,
        mac: str = None,
        manufacturer: str = None,
        model: str = None,
        monitor_area_desc: str = None,
        monitor_direction: str = None,
        name: str = None,
        org_code: str = None,
        owner_aps_id: str = None,
        password: str = None,
        place: str = None,
        place_code: str = None,
        port: int = None,
        serial_nuber: str = None,
        user_id: str = None,
    ):
        self.cap_direction = cap_direction
        self.corp_id = corp_id
        self.device_id = device_id
        self.device_type = device_type
        self.firmware_version = firmware_version
        self.ipaddr = ipaddr
        self.ipv6addr = ipv6addr
        self.ipv_4gateway = ipv_4gateway
        self.ipv_4netmask = ipv_4netmask
        self.id = id
        self.is_online = is_online
        self.latitude = latitude
        self.longitude = longitude
        self.mac = mac
        self.manufacturer = manufacturer
        self.model = model
        self.monitor_area_desc = monitor_area_desc
        self.monitor_direction = monitor_direction
        self.name = name
        self.org_code = org_code
        self.owner_aps_id = owner_aps_id
        self.password = password
        self.place = place
        self.place_code = place_code
        self.port = port
        self.serial_nuber = serial_nuber
        self.user_id = user_id

    def validate(self):
        """No required fields; nothing to validate."""
        pass

    def to_map(self):
        """Serialize the fields that are set into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._KEYS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeAiotDevicesResponseBodyAiotDevices(TeaModel):
    """Paged container of AIoT devices in the DescribeAiotDevices result."""

    def __init__(
        self,
        aiot_device_list: List[DescribeAiotDevicesResponseBodyAiotDevicesAiotDeviceList] = None,
        page_num: int = None,
        page_size: int = None,
        total_num: int = None,
    ):
        # Devices on this page.
        self.aiot_device_list = aiot_device_list
        # Current page number.
        self.page_num = page_num
        # Number of entries per page.
        self.page_size = page_size
        # Total number of devices.
        self.total_num = total_num

    def validate(self):
        """Validate each device entry in the list, skipping falsy entries."""
        for device in self.aiot_device_list or []:
            if device:
                device.validate()

    def to_map(self):
        """Serialize the page, converting each device to a plain dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # 'AiotDeviceList' is always emitted, even when the list is unset.
        result['AiotDeviceList'] = []
        if self.aiot_device_list is not None:
            for device in self.aiot_device_list:
                result['AiotDeviceList'].append(device.to_map() if device else None)
        if self.page_num is not None:
            result['PageNum'] = self.page_num
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        if self.total_num is not None:
            result['TotalNum'] = self.total_num
        return result

    def from_map(self, m: dict = None):
        """Deserialize the page, parsing each device into its model class."""
        m = m or dict()
        # The device list is always reset before repopulating.
        self.aiot_device_list = []
        raw_devices = m.get('AiotDeviceList')
        if raw_devices is not None:
            for item in raw_devices:
                parsed = DescribeAiotDevicesResponseBodyAiotDevicesAiotDeviceList().from_map(item)
                self.aiot_device_list.append(parsed)
        if m.get('PageNum') is not None:
            self.page_num = m.get('PageNum')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        if m.get('TotalNum') is not None:
            self.total_num = m.get('TotalNum')
        return self
class DescribeAiotDevicesResponseBody(TeaModel):
    """Top-level response payload for the DescribeAiotDevices API."""

    def __init__(
        self,
        aiot_devices: DescribeAiotDevicesResponseBodyAiotDevices = None,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.aiot_devices = aiot_devices
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        """Validate the nested device container, if present."""
        if self.aiot_devices:
            self.aiot_devices.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {}
        for key, value in (
            ('AiotDevices', self.aiot_devices.to_map() if self.aiot_devices is not None else None),
            ('Code', self.code),
            ('Message', self.message),
            ('RequestId', self.request_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('AiotDevices') is not None:
            self.aiot_devices = DescribeAiotDevicesResponseBodyAiotDevices().from_map(m['AiotDevices'])
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotDevicesResponse(TeaModel):
    """Response envelope: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeAiotDevicesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three envelope fields are mandatory; the body validates recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the envelope, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Deserialize the envelope; returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeAiotDevicesResponseBody().from_map(m['body'])
        return self
class DescribeAiotPersonTableItemsRequest(TeaModel):
    """Request parameters for the DescribeAiotPersonTableItems API."""

    def __init__(
        self,
        id: str = None,
        page_num: int = None,
        page_size: int = None,
        person_table_id: str = None,
        person_table_item_id: str = None,
    ):
        self.id = id
        self.page_num = page_num
        self.page_size = page_size
        self.person_table_id = person_table_id
        self.person_table_item_id = person_table_item_id

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('Id', self.id),
            ('PageNum', self.page_num),
            ('PageSize', self.page_size),
            ('PersonTableId', self.person_table_id),
            ('PersonTableItemId', self.person_table_item_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (
            ('Id', 'id'),
            ('PageNum', 'page_num'),
            ('PageSize', 'page_size'),
            ('PersonTableId', 'person_table_id'),
            ('PersonTableItemId', 'person_table_item_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotPersonTableItemsResponseBodyPersonTableItemsPersonTableItemListIdentificationList(TeaModel):
    """A single identification record (document number plus type code)."""

    def __init__(self, number: str = None, type: int = None):
        self.number = number
        self.type = type

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (('Number', self.number), ('Type', self.type))
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (('Number', 'number'), ('Type', 'type')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotPersonTableItemsResponseBodyPersonTableItemsPersonTableItemListImageListFeatureInfo(TeaModel):
    """Feature-vector metadata attached to a person-table image."""

    def __init__(
        self,
        algorithm_type: str = None,
        algorithm_version: str = None,
        feature_data: str = None,
        image_id: str = None,
        object_id: str = None,
        table_id: str = None,
        vendor: str = None,
    ):
        self.algorithm_type = algorithm_type
        self.algorithm_version = algorithm_version
        self.feature_data = feature_data
        self.image_id = image_id
        self.object_id = object_id
        self.table_id = table_id
        self.vendor = vendor

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('AlgorithmType', self.algorithm_type),
            ('AlgorithmVersion', self.algorithm_version),
            ('FeatureData', self.feature_data),
            ('ImageId', self.image_id),
            ('ObjectId', self.object_id),
            ('TableId', self.table_id),
            ('Vendor', self.vendor),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (
            ('AlgorithmType', 'algorithm_type'),
            ('AlgorithmVersion', 'algorithm_version'),
            ('FeatureData', 'feature_data'),
            ('ImageId', 'image_id'),
            ('ObjectId', 'object_id'),
            ('TableId', 'table_id'),
            ('Vendor', 'vendor'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotPersonTableItemsResponseBodyPersonTableItemsPersonTableItemListImageList(TeaModel):
    """One image attached to a person-table item, with optional feature info."""

    def __init__(
        self,
        data: str = None,
        device_id: str = None,
        event_sort: str = None,
        feature_info: DescribeAiotPersonTableItemsResponseBodyPersonTableItemsPersonTableItemListImageListFeatureInfo = None,
        file_format: str = None,
        height: int = None,
        image_id: str = None,
        shot_time: str = None,
        size: int = None,
        storage_path: str = None,
        type: str = None,
        width: int = None,
    ):
        self.data = data
        self.device_id = device_id
        self.event_sort = event_sort
        self.feature_info = feature_info
        self.file_format = file_format
        self.height = height
        self.image_id = image_id
        self.shot_time = shot_time
        self.size = size
        self.storage_path = storage_path
        self.type = type
        self.width = width

    def validate(self):
        """Validate the nested feature info, if present."""
        if self.feature_info:
            self.feature_info.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {}
        for key, value in (
            ('Data', self.data),
            ('DeviceId', self.device_id),
            ('EventSort', self.event_sort),
            ('FeatureInfo', self.feature_info.to_map() if self.feature_info is not None else None),
            ('FileFormat', self.file_format),
            ('Height', self.height),
            ('ImageId', self.image_id),
            ('ShotTime', self.shot_time),
            ('Size', self.size),
            ('StoragePath', self.storage_path),
            ('Type', self.type),
            ('Width', self.width),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('FeatureInfo') is not None:
            self.feature_info = (
                DescribeAiotPersonTableItemsResponseBodyPersonTableItemsPersonTableItemListImageListFeatureInfo().from_map(m['FeatureInfo'])
            )
        for key, attr in (
            ('Data', 'data'),
            ('DeviceId', 'device_id'),
            ('EventSort', 'event_sort'),
            ('FileFormat', 'file_format'),
            ('Height', 'height'),
            ('ImageId', 'image_id'),
            ('ShotTime', 'shot_time'),
            ('Size', 'size'),
            ('StoragePath', 'storage_path'),
            ('Type', 'type'),
            ('Width', 'width'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotPersonTableItemsResponseBodyPersonTableItemsPersonTableItemList(TeaModel):
    """One person record: identifications, images and descriptive fields."""

    def __init__(
        self,
        identification_list: List[DescribeAiotPersonTableItemsResponseBodyPersonTableItemsPersonTableItemListIdentificationList] = None,
        identification_num: int = None,
        image_list: List[DescribeAiotPersonTableItemsResponseBodyPersonTableItemsPersonTableItemListImageList] = None,
        image_num: int = None,
        last_change: str = None,
        person_code: str = None,
        person_id: str = None,
        person_name: str = None,
        person_table_id: str = None,
        remarks: str = None,
    ):
        self.identification_list = identification_list
        self.identification_num = identification_num
        self.image_list = image_list
        self.image_num = image_num
        self.last_change = last_change
        self.person_code = person_code
        self.person_id = person_id
        self.person_name = person_name
        self.person_table_id = person_table_id
        self.remarks = remarks

    def validate(self):
        """Recursively validate identification and image entries."""
        for entry in self.identification_list or []:
            if entry:
                entry.validate()
        for entry in self.image_list or []:
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize to a wire-format dict; list keys are always present."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {'IdentificationList': []}
        if self.identification_list is not None:
            result['IdentificationList'] = [
                entry.to_map() if entry else None for entry in self.identification_list
            ]
        if self.identification_num is not None:
            result['IdentificationNum'] = self.identification_num
        result['ImageList'] = []
        if self.image_list is not None:
            result['ImageList'] = [
                entry.to_map() if entry else None for entry in self.image_list
            ]
        for key, value in (
            ('ImageNum', self.image_num),
            ('LastChange', self.last_change),
            ('PersonCode', self.person_code),
            ('PersonId', self.person_id),
            ('PersonName', self.person_name),
            ('PersonTableId', self.person_table_id),
            ('Remarks', self.remarks),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        self.identification_list = []
        if m.get('IdentificationList') is not None:
            self.identification_list = [
                DescribeAiotPersonTableItemsResponseBodyPersonTableItemsPersonTableItemListIdentificationList().from_map(item)
                for item in m.get('IdentificationList')
            ]
        self.image_list = []
        if m.get('ImageList') is not None:
            self.image_list = [
                DescribeAiotPersonTableItemsResponseBodyPersonTableItemsPersonTableItemListImageList().from_map(item)
                for item in m.get('ImageList')
            ]
        for key, attr in (
            ('IdentificationNum', 'identification_num'),
            ('ImageNum', 'image_num'),
            ('LastChange', 'last_change'),
            ('PersonCode', 'person_code'),
            ('PersonId', 'person_id'),
            ('PersonName', 'person_name'),
            ('PersonTableId', 'person_table_id'),
            ('Remarks', 'remarks'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotPersonTableItemsResponseBodyPersonTableItems(TeaModel):
    """Paged container of person-table item records."""

    def __init__(
        self,
        page_num: int = None,
        page_size: int = None,
        person_table_item_list: List[DescribeAiotPersonTableItemsResponseBodyPersonTableItemsPersonTableItemList] = None,
        total_num: int = None,
    ):
        self.page_num = page_num
        self.page_size = page_size
        self.person_table_item_list = person_table_item_list
        self.total_num = total_num

    def validate(self):
        """Recursively validate each person-table item, if any."""
        for entry in self.person_table_item_list or []:
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize to a wire-format dict; the list key is always present."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {}
        if self.page_num is not None:
            result['PageNum'] = self.page_num
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        result['PersonTableItemList'] = []
        if self.person_table_item_list is not None:
            result['PersonTableItemList'] = [
                entry.to_map() if entry else None for entry in self.person_table_item_list
            ]
        if self.total_num is not None:
            result['TotalNum'] = self.total_num
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        self.person_table_item_list = []
        if m.get('PersonTableItemList') is not None:
            self.person_table_item_list = [
                DescribeAiotPersonTableItemsResponseBodyPersonTableItemsPersonTableItemList().from_map(item)
                for item in m.get('PersonTableItemList')
            ]
        for key, attr in (
            ('PageNum', 'page_num'),
            ('PageSize', 'page_size'),
            ('TotalNum', 'total_num'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotPersonTableItemsResponseBody(TeaModel):
    """Top-level response payload for the DescribeAiotPersonTableItems API."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        person_table_items: DescribeAiotPersonTableItemsResponseBodyPersonTableItems = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        self.person_table_items = person_table_items
        # Id of the request
        self.request_id = request_id

    def validate(self):
        """Validate the nested paged container, if present."""
        if self.person_table_items:
            self.person_table_items.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {}
        for key, value in (
            ('Code', self.code),
            ('Message', self.message),
            ('PersonTableItems', self.person_table_items.to_map() if self.person_table_items is not None else None),
            ('RequestId', self.request_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('PersonTableItems') is not None:
            self.person_table_items = (
                DescribeAiotPersonTableItemsResponseBodyPersonTableItems().from_map(m['PersonTableItems'])
            )
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotPersonTableItemsResponse(TeaModel):
    """Response envelope: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeAiotPersonTableItemsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three envelope fields are mandatory; the body validates recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the envelope, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Deserialize the envelope; returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeAiotPersonTableItemsResponseBody().from_map(m['body'])
        return self
class DescribeAiotPersonTablesRequest(TeaModel):
    """Request parameters for the DescribeAiotPersonTables API."""

    def __init__(self, id: str = None, person_table_id_list: str = None):
        self.id = id
        self.person_table_id_list = person_table_id_list

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (('Id', self.id), ('PersonTableIdList', self.person_table_id_list))
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (('Id', 'id'), ('PersonTableIdList', 'person_table_id_list')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotPersonTablesResponseBodyPersonTableList(TeaModel):
    """One person-table record returned by DescribeAiotPersonTables."""

    def __init__(
        self,
        device_id: str = None,
        face_num: int = None,
        last_change: str = None,
        name: str = None,
        person_num: int = None,
        person_table_id: str = None,
        total_person_num: int = None,
        type: int = None,
        verification_model_list: List[int] = None,
    ):
        self.device_id = device_id
        self.face_num = face_num
        self.last_change = last_change
        self.name = name
        self.person_num = person_num
        self.person_table_id = person_table_id
        self.total_person_num = total_person_num
        self.type = type
        self.verification_model_list = verification_model_list

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('DeviceId', self.device_id),
            ('FaceNum', self.face_num),
            ('LastChange', self.last_change),
            ('Name', self.name),
            ('PersonNum', self.person_num),
            ('PersonTableId', self.person_table_id),
            ('TotalPersonNum', self.total_person_num),
            ('Type', self.type),
            ('VerificationModelList', self.verification_model_list),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (
            ('DeviceId', 'device_id'),
            ('FaceNum', 'face_num'),
            ('LastChange', 'last_change'),
            ('Name', 'name'),
            ('PersonNum', 'person_num'),
            ('PersonTableId', 'person_table_id'),
            ('TotalPersonNum', 'total_person_num'),
            ('Type', 'type'),
            ('VerificationModelList', 'verification_model_list'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotPersonTablesResponseBody(TeaModel):
    """Top-level response payload for the DescribeAiotPersonTables API."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        person_table_list: List[DescribeAiotPersonTablesResponseBodyPersonTableList] = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        self.person_table_list = person_table_list
        # Id of the request
        self.request_id = request_id

    def validate(self):
        """Recursively validate each person-table record, if any."""
        for entry in self.person_table_list or []:
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize to a wire-format dict; the list key is always present."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        if self.message is not None:
            result['Message'] = self.message
        result['PersonTableList'] = []
        if self.person_table_list is not None:
            result['PersonTableList'] = [
                entry.to_map() if entry else None for entry in self.person_table_list
            ]
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        self.person_table_list = []
        if m.get('PersonTableList') is not None:
            self.person_table_list = [
                DescribeAiotPersonTablesResponseBodyPersonTableList().from_map(item)
                for item in m.get('PersonTableList')
            ]
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotPersonTablesResponse(TeaModel):
    """Response envelope: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeAiotPersonTablesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three envelope fields are mandatory; the body validates recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the envelope, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Deserialize the envelope; returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeAiotPersonTablesResponseBody().from_map(m['body'])
        return self
class DescribeAiotVehicleTableItemsRequest(TeaModel):
    """Request parameters for the DescribeAiotVehicleTableItems API."""

    def __init__(
        self,
        id: str = None,
        page_num: int = None,
        page_size: int = None,
        vehicle_table_id: str = None,
        vehicle_table_item_id: str = None,
    ):
        self.id = id
        self.page_num = page_num
        self.page_size = page_size
        self.vehicle_table_id = vehicle_table_id
        self.vehicle_table_item_id = vehicle_table_item_id

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('Id', self.id),
            ('PageNum', self.page_num),
            ('PageSize', self.page_size),
            ('VehicleTableId', self.vehicle_table_id),
            ('VehicleTableItemId', self.vehicle_table_item_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (
            ('Id', 'id'),
            ('PageNum', 'page_num'),
            ('PageSize', 'page_size'),
            ('VehicleTableId', 'vehicle_table_id'),
            ('VehicleTableItemId', 'vehicle_table_item_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotVehicleTableItemsResponseBodyVehicleTableItemsVehicleTableItemList(TeaModel):
    """One vehicle record: plate, owner contact and validity window."""

    def __init__(
        self,
        begin_time: str = None,
        end_time: str = None,
        owner_name: str = None,
        phone_no: str = None,
        plate_no: str = None,
        remarks: str = None,
        vehicle_table_id: str = None,
        vehicle_table_item_id: str = None,
    ):
        self.begin_time = begin_time
        self.end_time = end_time
        self.owner_name = owner_name
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.remarks = remarks
        self.vehicle_table_id = vehicle_table_id
        self.vehicle_table_item_id = vehicle_table_item_id

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('BeginTime', self.begin_time),
            ('EndTime', self.end_time),
            ('OwnerName', self.owner_name),
            ('PhoneNo', self.phone_no),
            ('PlateNo', self.plate_no),
            ('Remarks', self.remarks),
            ('VehicleTableId', self.vehicle_table_id),
            ('VehicleTableItemId', self.vehicle_table_item_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (
            ('BeginTime', 'begin_time'),
            ('EndTime', 'end_time'),
            ('OwnerName', 'owner_name'),
            ('PhoneNo', 'phone_no'),
            ('PlateNo', 'plate_no'),
            ('Remarks', 'remarks'),
            ('VehicleTableId', 'vehicle_table_id'),
            ('VehicleTableItemId', 'vehicle_table_item_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotVehicleTableItemsResponseBodyVehicleTableItems(TeaModel):
    """Paged container of vehicle-table item records."""

    def __init__(
        self,
        page_num: int = None,
        page_size: int = None,
        total_num: int = None,
        vehicle_table_item_list: List[DescribeAiotVehicleTableItemsResponseBodyVehicleTableItemsVehicleTableItemList] = None,
    ):
        self.page_num = page_num
        self.page_size = page_size
        self.total_num = total_num
        self.vehicle_table_item_list = vehicle_table_item_list

    def validate(self):
        """Recursively validate each vehicle record, if any."""
        for entry in self.vehicle_table_item_list or []:
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize to a wire-format dict; the list key is always present."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {}
        if self.page_num is not None:
            result['PageNum'] = self.page_num
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        if self.total_num is not None:
            result['TotalNum'] = self.total_num
        result['VehicleTableItemList'] = []
        if self.vehicle_table_item_list is not None:
            result['VehicleTableItemList'] = [
                entry.to_map() if entry else None for entry in self.vehicle_table_item_list
            ]
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        self.vehicle_table_item_list = []
        if m.get('VehicleTableItemList') is not None:
            self.vehicle_table_item_list = [
                DescribeAiotVehicleTableItemsResponseBodyVehicleTableItemsVehicleTableItemList().from_map(item)
                for item in m.get('VehicleTableItemList')
            ]
        for key, attr in (
            ('PageNum', 'page_num'),
            ('PageSize', 'page_size'),
            ('TotalNum', 'total_num'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotVehicleTableItemsResponseBody(TeaModel):
    """Top-level response payload for the DescribeAiotVehicleTableItems API."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        vehicle_table_items: DescribeAiotVehicleTableItemsResponseBodyVehicleTableItems = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.vehicle_table_items = vehicle_table_items

    def validate(self):
        """Validate the nested paged container, if present."""
        if self.vehicle_table_items:
            self.vehicle_table_items.validate()

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {}
        for key, value in (
            ('Code', self.code),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('VehicleTableItems', self.vehicle_table_items.to_map() if self.vehicle_table_items is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        if m.get('VehicleTableItems') is not None:
            self.vehicle_table_items = (
                DescribeAiotVehicleTableItemsResponseBodyVehicleTableItems().from_map(m['VehicleTableItems'])
            )
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotVehicleTableItemsResponse(TeaModel):
    """Response envelope: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeAiotVehicleTableItemsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three envelope fields are mandatory; the body validates recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the envelope, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Deserialize the envelope; returns self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeAiotVehicleTableItemsResponseBody().from_map(m['body'])
        return self
class DescribeAiotVehicleTablesRequest(TeaModel):
    """Request parameters for the DescribeAiotVehicleTables API."""

    def __init__(self, id: str = None, vehicle_table_id_list: str = None):
        self.id = id
        self.vehicle_table_id_list = vehicle_table_id_list

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (('Id', self.id), ('VehicleTableIdList', self.vehicle_table_id_list))
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (('Id', 'id'), ('VehicleTableIdList', 'vehicle_table_id_list')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotVehicleTablesResponseBodyVehicleTableList(TeaModel):
    """One vehicle-table record (id and display name)."""

    def __init__(self, vehicle_table_id: str = None, vehicle_table_name: str = None):
        self.vehicle_table_id = vehicle_table_id
        self.vehicle_table_name = vehicle_table_name

    def validate(self):
        """No constraints to enforce on this model."""
        pass

    def to_map(self):
        """Serialize to a wire-format dict, omitting unset fields."""
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('VehicleTableId', self.vehicle_table_id),
            ('VehicleTableName', self.vehicle_table_name),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        for key, attr in (
            ('VehicleTableId', 'vehicle_table_id'),
            ('VehicleTableName', 'vehicle_table_name'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotVehicleTablesResponseBody(TeaModel):
    """Top-level response payload for the DescribeAiotVehicleTables API."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        vehicle_table_list: List[DescribeAiotVehicleTablesResponseBodyVehicleTableList] = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.vehicle_table_list = vehicle_table_list

    def validate(self):
        """Recursively validate each vehicle-table record, if any."""
        for entry in self.vehicle_table_list or []:
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize to a wire-format dict; the list key is always present."""
        cached = super().to_map()
        if cached is not None:
            return cached
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        result['VehicleTableList'] = []
        if self.vehicle_table_list is not None:
            result['VehicleTableList'] = [
                entry.to_map() if entry else None for entry in self.vehicle_table_list
            ]
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or {}
        self.vehicle_table_list = []
        if m.get('VehicleTableList') is not None:
            self.vehicle_table_list = [
                DescribeAiotVehicleTablesResponseBodyVehicleTableList().from_map(item)
                for item in m.get('VehicleTableList')
            ]
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeAiotVehicleTablesResponse(TeaModel):
    """Envelope for DescribeAiotVehicleTables: headers, status code, body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeAiotVehicleTablesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are mandatory.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        serialized = {}
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.status_code is not None:
            serialized['statusCode'] = self.status_code
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeAiotVehicleTablesResponseBody().from_map(m['body'])
        return self
class DescribeCameraForInstanceRequest(TeaModel):
    """Paged request for listing the cameras bound to an instance."""

    def __init__(
        self,
        instance_id: str = None,
        page_number: int = None,
        page_size: int = None,
    ):
        # Id of the instance whose cameras are queried.
        self.instance_id = instance_id
        # Index of the page to fetch.
        self.page_number = page_number
        # Number of entries per page.
        self.page_size = page_size

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('InstanceId', self.instance_id),
            ('PageNumber', self.page_number),
            ('PageSize', self.page_size),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('InstanceId', 'instance_id'),
            ('PageNumber', 'page_number'),
            ('PageSize', 'page_size'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeCameraForInstanceResponseBodyDataRecords(TeaModel):
    """One camera record in the DescribeCameraForInstance response."""

    def __init__(
        self,
        camera_address: str = None,
        camera_id: str = None,
        camera_name: str = None,
        camera_status: int = None,
    ):
        # Device address.
        self.camera_address = camera_address
        # Device id.
        self.camera_id = camera_id
        # Device name.
        self.camera_name = camera_name
        # Device online status: 1 online, 0 offline, 2 pending registration.
        self.camera_status = camera_status

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('CameraAddress', self.camera_address),
            ('CameraId', self.camera_id),
            ('CameraName', self.camera_name),
            ('CameraStatus', self.camera_status),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('CameraAddress', 'camera_address'),
            ('CameraId', 'camera_id'),
            ('CameraName', 'camera_name'),
            ('CameraStatus', 'camera_status'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeCameraForInstanceResponseBodyData(TeaModel):
    """Paged camera listing inside the DescribeCameraForInstance body."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[DescribeCameraForInstanceResponseBodyDataRecords] = None,
        total_count: int = None,
    ):
        # Index of the current page.
        self.page_number = page_number
        # Number of entries per page.
        self.page_size = page_size
        # Camera records on this page.
        self.records = records
        # Total number of matching records.
        self.total_count = total_count

    def validate(self):
        for rec in self.records or []:
            if rec:
                rec.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        if self.page_number is not None:
            result['PageNumber'] = self.page_number
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        # The list key is always emitted, even when empty.
        result['Records'] = [
            rec.to_map() if rec else None for rec in (self.records or [])
        ]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        self.records = [
            DescribeCameraForInstanceResponseBodyDataRecords().from_map(rec)
            for rec in (m.get('Records') or [])
        ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class DescribeCameraForInstanceResponseBody(TeaModel):
    """Body of the DescribeCameraForInstance response."""

    def __init__(
        self,
        code: str = None,
        data: DescribeCameraForInstanceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        # Service result code.
        self.code = code
        # Paged camera data.
        self.data = data
        # Service result message.
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        payload = self.data
        if payload:
            payload.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = DescribeCameraForInstanceResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class DescribeCameraForInstanceResponse(TeaModel):
    """Envelope for DescribeCameraForInstance: headers, status code, body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeCameraForInstanceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are mandatory.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        serialized = {}
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.status_code is not None:
            serialized['statusCode'] = self.status_code
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeCameraForInstanceResponseBody().from_map(m['body'])
        return self
class DescribeChannelsRequest(TeaModel):
    """Paged, filterable request for listing channels (optionally under an NVR)."""

    def __init__(
        self,
        device_filter: str = None,
        device_status: str = None,
        nvr_id: str = None,
        page_num: int = None,
        page_size: int = None,
        show_un_config: int = None,
    ):
        self.device_filter = device_filter
        self.device_status = device_status
        self.nvr_id = nvr_id
        self.page_num = page_num
        self.page_size = page_size
        self.show_un_config = show_un_config

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('DeviceFilter', self.device_filter),
            ('DeviceStatus', self.device_status),
            ('NvrId', self.nvr_id),
            ('PageNum', self.page_num),
            ('PageSize', self.page_size),
            ('ShowUnConfig', self.show_un_config),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('DeviceFilter', 'device_filter'),
            ('DeviceStatus', 'device_status'),
            ('NvrId', 'nvr_id'),
            ('PageNum', 'page_num'),
            ('PageSize', 'page_size'),
            ('ShowUnConfig', 'show_un_config'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeChannelsResponseBodyDataRecords(TeaModel):
    """One channel/device record returned by DescribeChannels."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELDS = (
        ('AudioEnable', 'audio_enable'),
        ('CorpId', 'corp_id'),
        ('CreateTime', 'create_time'),
        ('DatasourceType', 'datasource_type'),
        ('DeviceAddress', 'device_address'),
        ('DeviceDirection', 'device_direction'),
        ('DeviceId', 'device_id'),
        ('DeviceIp', 'device_ip'),
        ('DeviceModel', 'device_model'),
        ('DeviceName', 'device_name'),
        ('DeviceRate', 'device_rate'),
        ('DeviceResolution', 'device_resolution'),
        ('DeviceSite', 'device_site'),
        ('DeviceSn', 'device_sn'),
        ('DeviceStatus', 'device_status'),
        ('DeviceSubType', 'device_sub_type'),
        ('DeviceType', 'device_type'),
        ('EncodeFormat', 'encode_format'),
        ('FrameRate', 'frame_rate'),
        ('GovLength', 'gov_length'),
        ('InProtocol', 'in_protocol'),
        ('Latitude', 'latitude'),
        ('Longitude', 'longitude'),
        ('ModifyTime', 'modify_time'),
        ('OSDTimeEnable', 'osdtime_enable'),
        ('OSDTimeType', 'osdtime_type'),
        ('OSDTimeX', 'osdtime_x'),
        ('OSDTimeY', 'osdtime_y'),
        ('ParentDeviceId', 'parent_device_id'),
        ('Password', 'password'),
        ('ServerId', 'server_id'),
        ('ServerIp', 'server_ip'),
        ('ServerPort', 'server_port'),
        ('ServerRealm', 'server_realm'),
        ('StreamAction', 'stream_action'),
        ('StreamStatus', 'stream_status'),
        ('Vap', 'vap'),
        ('Vendor', 'vendor'),
    )

    def __init__(
        self,
        audio_enable: str = None,
        corp_id: str = None,
        create_time: str = None,
        datasource_type: str = None,
        device_address: str = None,
        device_direction: str = None,
        device_id: str = None,
        device_ip: str = None,
        device_model: str = None,
        device_name: str = None,
        device_rate: str = None,
        device_resolution: str = None,
        device_site: str = None,
        device_sn: str = None,
        device_status: str = None,
        device_sub_type: str = None,
        device_type: str = None,
        encode_format: str = None,
        frame_rate: str = None,
        gov_length: str = None,
        in_protocol: str = None,
        latitude: str = None,
        longitude: str = None,
        modify_time: str = None,
        osdtime_enable: str = None,
        osdtime_type: str = None,
        osdtime_x: str = None,
        osdtime_y: str = None,
        parent_device_id: str = None,
        password: str = None,
        server_id: str = None,
        server_ip: str = None,
        server_port: str = None,
        server_realm: str = None,
        stream_action: str = None,
        stream_status: str = None,
        vap: str = None,
        vendor: str = None,
    ):
        self.audio_enable = audio_enable
        self.corp_id = corp_id
        self.create_time = create_time
        self.datasource_type = datasource_type
        self.device_address = device_address
        self.device_direction = device_direction
        self.device_id = device_id
        self.device_ip = device_ip
        self.device_model = device_model
        self.device_name = device_name
        self.device_rate = device_rate
        self.device_resolution = device_resolution
        self.device_site = device_site
        self.device_sn = device_sn
        self.device_status = device_status
        self.device_sub_type = device_sub_type
        self.device_type = device_type
        self.encode_format = encode_format
        self.frame_rate = frame_rate
        self.gov_length = gov_length
        self.in_protocol = in_protocol
        self.latitude = latitude
        self.longitude = longitude
        self.modify_time = modify_time
        self.osdtime_enable = osdtime_enable
        self.osdtime_type = osdtime_type
        self.osdtime_x = osdtime_x
        self.osdtime_y = osdtime_y
        self.parent_device_id = parent_device_id
        self.password = password
        self.server_id = server_id
        self.server_ip = server_ip
        self.server_port = server_port
        self.server_realm = server_realm
        self.stream_action = stream_action
        self.stream_status = stream_status
        self.vap = vap
        self.vendor = vendor

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {
            key: getattr(self, attr)
            for key, attr in self._FIELDS
            if getattr(self, attr) is not None
        }

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeChannelsResponseBodyData(TeaModel):
    """Paged channel listing inside the DescribeChannels body."""

    def __init__(
        self,
        page_num: int = None,
        page_size: int = None,
        records: List[DescribeChannelsResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
        un_config_list: List[str] = None,
    ):
        self.page_num = page_num
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page
        # Ids of channels that are not yet configured.
        self.un_config_list = un_config_list

    def validate(self):
        for rec in self.records or []:
            if rec:
                rec.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        if self.page_num is not None:
            result['PageNum'] = self.page_num
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        # The list key is always emitted, even when empty.
        result['Records'] = [
            rec.to_map() if rec else None for rec in (self.records or [])
        ]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.total_page is not None:
            result['TotalPage'] = self.total_page
        if self.un_config_list is not None:
            result['UnConfigList'] = self.un_config_list
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('PageNum') is not None:
            self.page_num = m.get('PageNum')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        self.records = [
            DescribeChannelsResponseBodyDataRecords().from_map(rec)
            for rec in (m.get('Records') or [])
        ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        if m.get('UnConfigList') is not None:
            self.un_config_list = m.get('UnConfigList')
        return self
class DescribeChannelsResponseBody(TeaModel):
    """Body of the DescribeChannels response."""

    def __init__(
        self,
        code: str = None,
        data: DescribeChannelsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        payload = self.data
        if payload:
            payload.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = DescribeChannelsResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class DescribeChannelsResponse(TeaModel):
    """Envelope for DescribeChannels: headers, status code, body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeChannelsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are mandatory.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        serialized = {}
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.status_code is not None:
            serialized['statusCode'] = self.status_code
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeChannelsResponseBody().from_map(m['body'])
        return self
class DescribeDataSourcesRequest(TeaModel):
    """Paged, filterable request for listing data sources."""

    def __init__(
        self,
        corp_id_list: str = None,
        data_source_category: str = None,
        data_source_filter: str = None,
        data_source_id_list: str = None,
        data_source_type: str = None,
        page_num: int = None,
        page_size: int = None,
        stream_status: str = None,
    ):
        self.corp_id_list = corp_id_list
        self.data_source_category = data_source_category
        self.data_source_filter = data_source_filter
        self.data_source_id_list = data_source_id_list
        self.data_source_type = data_source_type
        self.page_num = page_num
        self.page_size = page_size
        self.stream_status = stream_status

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('CorpIdList', self.corp_id_list),
            ('DataSourceCategory', self.data_source_category),
            ('DataSourceFilter', self.data_source_filter),
            ('DataSourceIdList', self.data_source_id_list),
            ('DataSourceType', self.data_source_type),
            ('PageNum', self.page_num),
            ('PageSize', self.page_size),
            ('StreamStatus', self.stream_status),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('CorpIdList', 'corp_id_list'),
            ('DataSourceCategory', 'data_source_category'),
            ('DataSourceFilter', 'data_source_filter'),
            ('DataSourceIdList', 'data_source_id_list'),
            ('DataSourceType', 'data_source_type'),
            ('PageNum', 'page_num'),
            ('PageSize', 'page_size'),
            ('StreamStatus', 'stream_status'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDataSourcesResponseBodyDataRecords(TeaModel):
    """One data-source record returned by DescribeDataSources."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELDS = (
        ('Action', 'action'),
        ('CorpId', 'corp_id'),
        ('CreateTime', 'create_time'),
        ('DataSourceId', 'data_source_id'),
        ('DataSourceName', 'data_source_name'),
        ('DataSourceType', 'data_source_type'),
        ('Description', 'description'),
        ('KafkaTopic', 'kafka_topic'),
        ('OssPath', 'oss_path'),
        ('StreamStatus', 'stream_status'),
        ('Url', 'url'),
    )

    def __init__(
        self,
        action: str = None,
        corp_id: str = None,
        create_time: str = None,
        data_source_id: str = None,
        data_source_name: str = None,
        data_source_type: str = None,
        description: str = None,
        kafka_topic: str = None,
        oss_path: str = None,
        stream_status: str = None,
        url: str = None,
    ):
        self.action = action
        self.corp_id = corp_id
        self.create_time = create_time
        self.data_source_id = data_source_id
        self.data_source_name = data_source_name
        self.data_source_type = data_source_type
        self.description = description
        self.kafka_topic = kafka_topic
        self.oss_path = oss_path
        self.stream_status = stream_status
        self.url = url

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {
            key: getattr(self, attr)
            for key, attr in self._FIELDS
            if getattr(self, attr) is not None
        }

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDataSourcesResponseBodyData(TeaModel):
    """Paged data-source listing inside the DescribeDataSources body."""

    def __init__(
        self,
        page_num: int = None,
        page_size: int = None,
        records: List[DescribeDataSourcesResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_num = page_num
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        for rec in self.records or []:
            if rec:
                rec.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        if self.page_num is not None:
            result['PageNum'] = self.page_num
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        # The list key is always emitted, even when empty.
        result['Records'] = [
            rec.to_map() if rec else None for rec in (self.records or [])
        ]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.total_page is not None:
            result['TotalPage'] = self.total_page
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('PageNum') is not None:
            self.page_num = m.get('PageNum')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        self.records = [
            DescribeDataSourcesResponseBodyDataRecords().from_map(rec)
            for rec in (m.get('Records') or [])
        ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        return self
class DescribeDataSourcesResponseBody(TeaModel):
    """Body of the DescribeDataSources response."""

    def __init__(
        self,
        code: str = None,
        data: DescribeDataSourcesResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        payload = self.data
        if payload:
            payload.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = DescribeDataSourcesResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class DescribeDataSourcesResponse(TeaModel):
    """Envelope for DescribeDataSources: headers, status code, body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeDataSourcesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope fields are mandatory.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        serialized = {}
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.status_code is not None:
            serialized['statusCode'] = self.status_code
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeDataSourcesResponseBody().from_map(m['body'])
        return self
class DescribeDevicesRequest(TeaModel):
    """Paged request for listing devices, filterable by corp and device ids."""

    def __init__(
        self,
        corp_id_list: str = None,
        device_id_list: str = None,
        page_num: int = None,
        page_size: int = None,
    ):
        self.corp_id_list = corp_id_list
        self.device_id_list = device_id_list
        self.page_num = page_num
        self.page_size = page_size

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('CorpIdList', self.corp_id_list),
            ('DeviceIdList', self.device_id_list),
            ('PageNum', self.page_num),
            ('PageSize', self.page_size),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('CorpIdList', 'corp_id_list'),
            ('DeviceIdList', 'device_id_list'),
            ('PageNum', 'page_num'),
            ('PageSize', 'page_size'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDevicesResponseBodyDataRecords(TeaModel):
    """One device record returned by DescribeDevices."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELDS = (
        ('CapturedPictureId', 'captured_picture_id'),
        ('CorpId', 'corp_id'),
        ('CreateTime', 'create_time'),
        ('DeviceAddress', 'device_address'),
        ('DeviceId', 'device_id'),
        ('DeviceName', 'device_name'),
        ('DeviceType', 'device_type'),
        ('InProtocol', 'in_protocol'),
        ('Latitude', 'latitude'),
        ('Longitude', 'longitude'),
        ('Password', 'password'),
        ('Status', 'status'),
        ('Vendor', 'vendor'),
    )

    def __init__(
        self,
        captured_picture_id: str = None,
        corp_id: str = None,
        create_time: str = None,
        device_address: str = None,
        device_id: str = None,
        device_name: str = None,
        device_type: str = None,
        in_protocol: str = None,
        latitude: str = None,
        longitude: str = None,
        password: str = None,
        status: str = None,
        vendor: str = None,
    ):
        self.captured_picture_id = captured_picture_id
        self.corp_id = corp_id
        self.create_time = create_time
        self.device_address = device_address
        self.device_id = device_id
        self.device_name = device_name
        self.device_type = device_type
        self.in_protocol = in_protocol
        self.latitude = latitude
        self.longitude = longitude
        self.password = password
        self.status = status
        self.vendor = vendor

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {
            key: getattr(self, attr)
            for key, attr in self._FIELDS
            if getattr(self, attr) is not None
        }

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDevicesResponseBodyData(TeaModel):
    """Paged DescribeDevices payload: paging counters plus the device records."""

    def __init__(
        self,
        page_num: int = None,
        page_size: int = None,
        records: List[DescribeDevicesResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_num = page_num
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        # Validate every nested record; falsy entries are skipped.
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize to a wire-format dict; None scalars are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('PageNum', self.page_num), ('PageSize', self.page_size)):
            if value is not None:
                result[key] = value
        # 'Records' is always present in the output, even when there are none.
        records = self.records
        result['Records'] = [r.to_map() if r else None for r in records] if records is not None else []
        for key, value in (('TotalCount', self.total_count), ('TotalPage', self.total_page)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in (('PageNum', 'page_num'), ('PageSize', 'page_size')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.records = []
        if m.get('Records') is not None:
            self.records = [
                DescribeDevicesResponseBodyDataRecords().from_map(item)
                for item in m.get('Records')
            ]
        for key, attr in (('TotalCount', 'total_count'), ('TotalPage', 'total_page')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeDevicesResponseBody(TeaModel):
    """DescribeDevices response body: code/message envelope plus the data payload."""

    def __init__(
        self,
        code: str = None,
        data: DescribeDevicesResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Delegate validation to the nested data model when present.
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        for key, value in (('Message', self.message), ('RequestId', self.request_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        code = m.get('Code')
        if code is not None:
            self.code = code
        if m.get('Data') is not None:
            self.data = DescribeDevicesResponseBodyData().from_map(m['Data'])
        message = m.get('Message')
        if message is not None:
            self.message = message
        request_id = m.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class DescribeDevicesResponse(TeaModel):
    """Full DescribeDevices API response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeDevicesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status = m.get('statusCode')
        if status is not None:
            self.status_code = status
        if m.get('body') is not None:
            self.body = DescribeDevicesResponseBody().from_map(m['body'])
        return self
class DescribeIpcsRequest(TeaModel):
    """Request parameters for the DescribeIpcs (paged device query) API."""

    def __init__(
        self,
        corp_id_list: str = None,
        device_filter: str = None,
        device_id_list: str = None,
        device_status: str = None,
        nvr_id_list: str = None,
        page_num: int = None,
        page_size: int = None,
        parent_device_type: str = None,
    ):
        self.corp_id_list = corp_id_list
        self.device_filter = device_filter
        self.device_id_list = device_id_list
        self.device_status = device_status
        self.nvr_id_list = nvr_id_list
        self.page_num = page_num
        self.page_size = page_size
        self.parent_device_type = parent_device_type

    def _field_pairs(self):
        # (wire key, attribute name) pairs, in wire order.
        return (
            ('CorpIdList', 'corp_id_list'),
            ('DeviceFilter', 'device_filter'),
            ('DeviceIdList', 'device_id_list'),
            ('DeviceStatus', 'device_status'),
            ('NvrIdList', 'nvr_id_list'),
            ('PageNum', 'page_num'),
            ('PageSize', 'page_size'),
            ('ParentDeviceType', 'parent_device_type'),
        )

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._field_pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._field_pairs():
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeIpcsResponseBodyDataRecords(TeaModel):
    """One device record returned by DescribeIpcs; all fields optional scalars."""

    def __init__(
        self,
        audio_enable: str = None,
        corp_id: str = None,
        create_time: str = None,
        datasource_type: str = None,
        device_address: str = None,
        device_direction: str = None,
        device_id: str = None,
        device_ip: str = None,
        device_model: str = None,
        device_name: str = None,
        device_rate: str = None,
        device_resolution: str = None,
        device_site: str = None,
        device_sn: str = None,
        device_status: str = None,
        device_sub_type: str = None,
        device_type: str = None,
        encode_format: str = None,
        frame_rate: str = None,
        gov_length: str = None,
        in_protocol: str = None,
        latitude: str = None,
        longitude: str = None,
        modify_time: str = None,
        osdtime_enable: str = None,
        osdtime_type: str = None,
        osdtime_x: str = None,
        osdtime_y: str = None,
        parent_device_id: str = None,
        password: str = None,
        server_id: str = None,
        server_ip: str = None,
        server_port: str = None,
        server_realm: str = None,
        stream_action: str = None,
        stream_status: str = None,
        vap: str = None,
        vendor: str = None,
    ):
        self.audio_enable = audio_enable
        self.corp_id = corp_id
        self.create_time = create_time
        self.datasource_type = datasource_type
        self.device_address = device_address
        self.device_direction = device_direction
        self.device_id = device_id
        self.device_ip = device_ip
        self.device_model = device_model
        self.device_name = device_name
        self.device_rate = device_rate
        self.device_resolution = device_resolution
        self.device_site = device_site
        self.device_sn = device_sn
        self.device_status = device_status
        self.device_sub_type = device_sub_type
        self.device_type = device_type
        self.encode_format = encode_format
        self.frame_rate = frame_rate
        self.gov_length = gov_length
        self.in_protocol = in_protocol
        self.latitude = latitude
        self.longitude = longitude
        self.modify_time = modify_time
        self.osdtime_enable = osdtime_enable
        self.osdtime_type = osdtime_type
        self.osdtime_x = osdtime_x
        self.osdtime_y = osdtime_y
        self.parent_device_id = parent_device_id
        self.password = password
        self.server_id = server_id
        self.server_ip = server_ip
        self.server_port = server_port
        self.server_realm = server_realm
        self.stream_action = stream_action
        self.stream_status = stream_status
        self.vap = vap
        self.vendor = vendor

    def _field_pairs(self):
        # (wire key, attribute name) pairs, in wire order.
        return (
            ('AudioEnable', 'audio_enable'),
            ('CorpId', 'corp_id'),
            ('CreateTime', 'create_time'),
            ('DatasourceType', 'datasource_type'),
            ('DeviceAddress', 'device_address'),
            ('DeviceDirection', 'device_direction'),
            ('DeviceId', 'device_id'),
            ('DeviceIp', 'device_ip'),
            ('DeviceModel', 'device_model'),
            ('DeviceName', 'device_name'),
            ('DeviceRate', 'device_rate'),
            ('DeviceResolution', 'device_resolution'),
            ('DeviceSite', 'device_site'),
            ('DeviceSn', 'device_sn'),
            ('DeviceStatus', 'device_status'),
            ('DeviceSubType', 'device_sub_type'),
            ('DeviceType', 'device_type'),
            ('EncodeFormat', 'encode_format'),
            ('FrameRate', 'frame_rate'),
            ('GovLength', 'gov_length'),
            ('InProtocol', 'in_protocol'),
            ('Latitude', 'latitude'),
            ('Longitude', 'longitude'),
            ('ModifyTime', 'modify_time'),
            ('OSDTimeEnable', 'osdtime_enable'),
            ('OSDTimeType', 'osdtime_type'),
            ('OSDTimeX', 'osdtime_x'),
            ('OSDTimeY', 'osdtime_y'),
            ('ParentDeviceId', 'parent_device_id'),
            ('Password', 'password'),
            ('ServerId', 'server_id'),
            ('ServerIp', 'server_ip'),
            ('ServerPort', 'server_port'),
            ('ServerRealm', 'server_realm'),
            ('StreamAction', 'stream_action'),
            ('StreamStatus', 'stream_status'),
            ('Vap', 'vap'),
            ('Vendor', 'vendor'),
        )

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._field_pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._field_pairs():
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeIpcsResponseBodyData(TeaModel):
    """Paged DescribeIpcs payload: paging counters plus the device records."""

    def __init__(
        self,
        page_num: int = None,
        page_size: int = None,
        records: List[DescribeIpcsResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_num = page_num
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        # Validate every nested record; falsy entries are skipped.
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize to a wire-format dict; None scalars are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('PageNum', self.page_num), ('PageSize', self.page_size)):
            if value is not None:
                result[key] = value
        # 'Records' is always present in the output, even when there are none.
        records = self.records
        result['Records'] = [r.to_map() if r else None for r in records] if records is not None else []
        for key, value in (('TotalCount', self.total_count), ('TotalPage', self.total_page)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in (('PageNum', 'page_num'), ('PageSize', 'page_size')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.records = []
        if m.get('Records') is not None:
            self.records = [
                DescribeIpcsResponseBodyDataRecords().from_map(item)
                for item in m.get('Records')
            ]
        for key, attr in (('TotalCount', 'total_count'), ('TotalPage', 'total_page')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeIpcsResponseBody(TeaModel):
    """DescribeIpcs response body: code/message envelope plus the data payload."""

    def __init__(
        self,
        code: str = None,
        data: DescribeIpcsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Delegate validation to the nested data model when present.
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        for key, value in (('Message', self.message), ('RequestId', self.request_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        code = m.get('Code')
        if code is not None:
            self.code = code
        if m.get('Data') is not None:
            self.data = DescribeIpcsResponseBodyData().from_map(m['Data'])
        message = m.get('Message')
        if message is not None:
            self.message = message
        request_id = m.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class DescribeIpcsResponse(TeaModel):
    """Full DescribeIpcs API response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeIpcsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status = m.get('statusCode')
        if status is not None:
            self.status_code = status
        if m.get('body') is not None:
            self.body = DescribeIpcsResponseBody().from_map(m['body'])
        return self
class DescribeModelServiceRequest(TeaModel):
    """Request for DescribeModelService: look up one model service by its id."""

    def __init__(
        self,
        model_service_id: str = None,
    ):
        self.model_service_id = model_service_id

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.model_service_id is not None:
            result['ModelServiceId'] = self.model_service_id
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        service_id = m.get('ModelServiceId')
        if service_id is not None:
            self.model_service_id = service_id
        return self
class DescribeModelServiceResponseBodyDataModelApiList(TeaModel):
    """One API entry of a model service as returned by DescribeModelService."""

    def __init__(
        self,
        algorithm_api_code: str = None,
        api_id: str = None,
        api_name: str = None,
        api_path: str = None,
        create_time: str = None,
    ):
        self.algorithm_api_code = algorithm_api_code
        self.api_id = api_id
        self.api_name = api_name
        self.api_path = api_path
        self.create_time = create_time

    def _field_pairs(self):
        # (wire key, attribute name) pairs, in wire order.
        return (
            ('AlgorithmApiCode', 'algorithm_api_code'),
            ('ApiId', 'api_id'),
            ('ApiName', 'api_name'),
            ('ApiPath', 'api_path'),
            ('CreateTime', 'create_time'),
        )

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._field_pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._field_pairs():
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeModelServiceResponseBodyData(TeaModel):
    """DescribeModelService detail payload: service metadata plus its API list."""

    def __init__(
        self,
        algorithm_code: str = None,
        app_code: str = None,
        create_time: str = None,
        model_api_list: List[DescribeModelServiceResponseBodyDataModelApiList] = None,
        model_service_instance_id: str = None,
        model_service_instance_name: str = None,
        model_service_status: str = None,
        qps: int = None,
    ):
        self.algorithm_code = algorithm_code
        self.app_code = app_code
        self.create_time = create_time
        self.model_api_list = model_api_list
        self.model_service_instance_id = model_service_instance_id
        self.model_service_instance_name = model_service_instance_name
        self.model_service_status = model_service_status
        self.qps = qps

    def validate(self):
        # Validate each nested API entry; falsy entries are skipped.
        for api in self.model_api_list or []:
            if api:
                api.validate()

    def to_map(self):
        """Serialize to a wire-format dict; None scalars are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('AlgorithmCode', self.algorithm_code),
            ('AppCode', self.app_code),
            ('CreateTime', self.create_time),
        ):
            if value is not None:
                result[key] = value
        # 'ModelApiList' is always present in the output, even when empty.
        apis = self.model_api_list
        result['ModelApiList'] = [a.to_map() if a else None for a in apis] if apis is not None else []
        for key, value in (
            ('ModelServiceInstanceId', self.model_service_instance_id),
            ('ModelServiceInstanceName', self.model_service_instance_name),
            ('ModelServiceStatus', self.model_service_status),
            ('Qps', self.qps),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in (
            ('AlgorithmCode', 'algorithm_code'),
            ('AppCode', 'app_code'),
            ('CreateTime', 'create_time'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.model_api_list = []
        if m.get('ModelApiList') is not None:
            self.model_api_list = [
                DescribeModelServiceResponseBodyDataModelApiList().from_map(item)
                for item in m.get('ModelApiList')
            ]
        for key, attr in (
            ('ModelServiceInstanceId', 'model_service_instance_id'),
            ('ModelServiceInstanceName', 'model_service_instance_name'),
            ('ModelServiceStatus', 'model_service_status'),
            ('Qps', 'qps'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeModelServiceResponseBody(TeaModel):
    """DescribeModelService response body: code/message envelope plus the data payload."""

    def __init__(
        self,
        code: str = None,
        data: DescribeModelServiceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        # Delegate validation to the nested data model when present.
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        for key, value in (('Message', self.message), ('RequestId', self.request_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        code = m.get('Code')
        if code is not None:
            self.code = code
        if m.get('Data') is not None:
            self.data = DescribeModelServiceResponseBodyData().from_map(m['Data'])
        message = m.get('Message')
        if message is not None:
            self.message = message
        request_id = m.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class DescribeModelServiceResponse(TeaModel):
    """Full DescribeModelService API response: HTTP headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeModelServiceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status = m.get('statusCode')
        if status is not None:
            self.status_code = status
        if m.get('body') is not None:
            self.body = DescribeModelServiceResponseBody().from_map(m['body'])
        return self
class DescribeModelServiceListRequest(TeaModel):
    """Request parameters for DescribeModelServiceList: paged model-service query."""

    def __init__(
        self,
        algorithm_code: str = None,
        include_deleted: bool = None,
        model_service_name: str = None,
        page_num: int = None,
        page_size: int = None,
    ):
        self.algorithm_code = algorithm_code
        self.include_deleted = include_deleted
        self.model_service_name = model_service_name
        self.page_num = page_num
        self.page_size = page_size

    def _field_pairs(self):
        # (wire key, attribute name) pairs, in wire order.
        return (
            ('AlgorithmCode', 'algorithm_code'),
            ('IncludeDeleted', 'include_deleted'),
            ('ModelServiceName', 'model_service_name'),
            ('PageNum', 'page_num'),
            ('PageSize', 'page_size'),
        )

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._field_pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._field_pairs():
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeModelServiceListResponseBodyDataModelList(TeaModel):
    """One model-service entry returned by DescribeModelServiceList."""

    def __init__(
        self,
        algorithm_code: str = None,
        app_code: str = None,
        create_time: str = None,
        model_service_instance_id: str = None,
        model_service_name: str = None,
        model_service_status: str = None,
        qps_required: int = None,
    ):
        self.algorithm_code = algorithm_code
        self.app_code = app_code
        self.create_time = create_time
        self.model_service_instance_id = model_service_instance_id
        self.model_service_name = model_service_name
        self.model_service_status = model_service_status
        self.qps_required = qps_required

    def _field_pairs(self):
        # (wire key, attribute name) pairs, in wire order.
        return (
            ('AlgorithmCode', 'algorithm_code'),
            ('AppCode', 'app_code'),
            ('CreateTime', 'create_time'),
            ('ModelServiceInstanceId', 'model_service_instance_id'),
            ('ModelServiceName', 'model_service_name'),
            ('ModelServiceStatus', 'model_service_status'),
            ('QpsRequired', 'qps_required'),
        )

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._field_pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._field_pairs():
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeModelServiceListResponseBodyData(TeaModel):
    """Paged model-service listing: the service entries plus a total count."""

    def __init__(
        self,
        model_list: List[DescribeModelServiceListResponseBodyDataModelList] = None,
        total: str = None,
    ):
        self.model_list = model_list
        self.total = total

    def validate(self):
        # Validate each nested service entry; falsy entries are skipped.
        for entry in self.model_list or []:
            if entry:
                entry.validate()

    def to_map(self):
        """Serialize to a wire-format dict; None scalars are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # 'ModelList' is always present in the output, even when empty.
        entries = self.model_list
        result['ModelList'] = [e.to_map() if e else None for e in entries] if entries is not None else []
        if self.total is not None:
            result['Total'] = self.total
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        self.model_list = []
        if m.get('ModelList') is not None:
            self.model_list = [
                DescribeModelServiceListResponseBodyDataModelList().from_map(item)
                for item in m.get('ModelList')
            ]
        total = m.get('Total')
        if total is not None:
            self.total = total
        return self
class DescribeModelServiceListResponseBody(TeaModel):
    """DescribeModelServiceList response body: code/message envelope plus data."""

    def __init__(
        self,
        code: str = None,
        data: DescribeModelServiceListResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        # Delegate validation to the nested data model when present.
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        for key, value in (('Message', self.message), ('RequestId', self.request_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        code = m.get('Code')
        if code is not None:
            self.code = code
        if m.get('Data') is not None:
            self.data = DescribeModelServiceListResponseBodyData().from_map(m['Data'])
        message = m.get('Message')
        if message is not None:
            self.message = message
        request_id = m.get('RequestId')
        if request_id is not None:
            self.request_id = request_id
        return self
class DescribeModelServiceListResponse(TeaModel):
    """Full DescribeModelServiceList API response: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeModelServiceListResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status = m.get('statusCode')
        if status is not None:
            self.status_code = status
        if m.get('body') is not None:
            self.body = DescribeModelServiceListResponseBody().from_map(m['body'])
        return self
class DescribeNvrDevicesRequest(TeaModel):
    """Request parameters for DescribeNvrDevices: paged NVR device query."""

    def __init__(
        self,
        corp_id_list: str = None,
        device_filter: str = None,
        nvr_device_id_list: str = None,
        page_num: int = None,
        page_size: int = None,
    ):
        self.corp_id_list = corp_id_list
        self.device_filter = device_filter
        self.nvr_device_id_list = nvr_device_id_list
        self.page_num = page_num
        self.page_size = page_size

    def _field_pairs(self):
        # (wire key, attribute name) pairs, in wire order.
        return (
            ('CorpIdList', 'corp_id_list'),
            ('DeviceFilter', 'device_filter'),
            ('NvrDeviceIdList', 'nvr_device_id_list'),
            ('PageNum', 'page_num'),
            ('PageSize', 'page_size'),
        )

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a wire-format dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._field_pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict and return self."""
        m = m or dict()
        for key, attr in self._field_pairs():
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class DescribeNvrDevicesResponseBodyDataRecords(TeaModel):
    def __init__(
        self,
        channel: str = None,
        corp_id: str = None,
        create_time: str = None,
        datasource_type: str = None,
        device_id: str = None,
        device_model: str = None,
        device_name: str = None,
        device_sn: str = None,
        device_status: str = None,
        device_type: str = None,
        modify_time: str = None,
        project_name: str = None,
        region_name: str = None,
    ):
        """One NVR device record of a DescribeNvrDevices response.

        Every field is an optional string; unset fields stay None and are
        omitted from the wire form produced by to_map().
        """
        self.channel = channel
        self.corp_id = corp_id
        self.create_time = create_time
        self.datasource_type = datasource_type
        self.device_id = device_id
        self.device_model = device_model
        self.device_name = device_name
        self.device_sn = device_sn
        self.device_status = device_status
        self.device_type = device_type
        self.modify_time = modify_time
        self.project_name = project_name
        self.region_name = region_name
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.channel is not None:
result['Channel'] = self.channel
if self.corp_id is not None:
result['CorpId'] = self.corp_id
if self.create_time is not None:
result['CreateTime'] = self.create_time
if self.datasource_type is not None:
result['DatasourceType'] = self.datasource_type
if self.device_id is not None:
result['DeviceId'] = self.device_id
if self.device_model is not None:
result['DeviceModel'] = self.device_model
if self.device_name is not None:
result['DeviceName'] = self.device_name
if self.device_sn is not None:
result['DeviceSn'] = self.device_sn
if self.device_status is not None:
result['DeviceStatus'] = self.device_status
if self.device_type is not None:
result['DeviceType'] = self.device_type
if self.modify_time is not None:
result['ModifyTime'] = self.modify_time
if self.project_name is not None:
result['ProjectName'] = self.project_name
if self.region_name is not None:
result['RegionName'] = self.region_name
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('Channel') is not None:
self.channel = m.get('Channel')
if m.get('CorpId') is not None:
self.corp_id = m.get('CorpId')
if m.get('CreateTime') is not None:
self.create_time = m.get('CreateTime')
if m.get('DatasourceType') is not None:
self.datasource_type = m.get('DatasourceType')
if m.get('DeviceId') is not None:
self.device_id = m.get('DeviceId')
if m.get('DeviceModel') is not None:
self.device_model = m.get('DeviceModel')
if m.get('DeviceName') is not None:
self.device_name = m.get('DeviceName')
if m.get('DeviceSn') is not None:
self.device_sn = m.get('DeviceSn')
if m.get('DeviceStatus') is not None:
self.device_status = m.get('DeviceStatus')
if m.get('DeviceType') is not None:
self.device_type = m.get('DeviceType')
if m.get('ModifyTime') is not None:
self.modify_time = m.get('ModifyTime')
if m.get('ProjectName') is not None:
self.project_name = m.get('ProjectName')
if m.get('RegionName') is not None:
self.region_name = m.get('RegionName')
return self
class DescribeNvrDevicesResponseBodyData(TeaModel):
    """Paged container of DescribeNvrDevices records."""

    def __init__(
        self,
        page_num: int = None,
        page_size: int = None,
        records: List[DescribeNvrDevicesResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_num = page_num
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        """Recursively validate each contained record."""
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize to a wire-format dict; 'Records' is always present."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.page_num is not None:
            result['PageNum'] = self.page_num
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        result['Records'] = [r.to_map() if r else None for r in (self.records or [])]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.total_page is not None:
            result['TotalPage'] = self.total_page
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('PageNum') is not None:
            self.page_num = m.get('PageNum')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        raw_records = m.get('Records')
        self.records = (
            [DescribeNvrDevicesResponseBodyDataRecords().from_map(r) for r in raw_records]
            if raw_records is not None
            else []
        )
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        return self
class DescribeNvrDevicesResponseBody(TeaModel):
    """Top-level response body for DescribeNvrDevices."""

    def __init__(
        self,
        code: str = None,
        data: DescribeNvrDevicesResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Recursively validate the nested data payload."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        for key, value in (('Message', self.message), ('RequestId', self.request_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = DescribeNvrDevicesResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class DescribeNvrDevicesResponse(TeaModel):
    """HTTP-level wrapper: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeNvrDevicesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        for value, name in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {
            key: value
            for key, value in (('headers', self.headers), ('statusCode', self.status_code))
            if value is not None
        }
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeNvrDevicesResponseBody().from_map(m['body'])
        return self
class DescribeNvrsRequest(TeaModel):
    """Request model for DescribeNvrs: optional filters plus paging."""

    # (python attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('corp_id_list', 'CorpIdList'),
        ('device_filter', 'DeviceFilter'),
        ('nvr_device_id_list', 'NvrDeviceIdList'),
        ('page_num', 'PageNum'),
        ('page_size', 'PageSize'),
    )

    def __init__(
        self,
        corp_id_list: str = None,
        device_filter: str = None,
        nvr_device_id_list: str = None,
        page_num: int = None,
        page_size: int = None,
    ):
        self.corp_id_list = corp_id_list
        self.device_filter = device_filter
        self.nvr_device_id_list = nvr_device_id_list
        self.page_num = page_num
        self.page_size = page_size

    def validate(self):
        """No field constraints for this request model."""
        pass

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeNvrsResponseBodyDataRecordsSubDeviceInfo(TeaModel):
    """A single sub-device entry nested inside an NVR record."""

    def __init__(
        self,
        sub_device_id: str = None,
    ):
        self.sub_device_id = sub_device_id

    def validate(self):
        """No field constraints for this model."""
        pass

    def to_map(self):
        """Serialize the set field into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        if self.sub_device_id is None:
            return dict()
        return {'SubDeviceId': self.sub_device_id}

    def from_map(self, m: dict = None):
        """Populate the field from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('SubDeviceId') is not None:
            self.sub_device_id = m.get('SubDeviceId')
        return self
class DescribeNvrsResponseBodyDataRecords(TeaModel):
    """One NVR record returned by DescribeNvrs, including its sub-devices."""

    # Scalar (python attribute, wire key) pairs, in wire order; the
    # 'SubDeviceInfo' list and trailing 'Vendor' key are handled separately
    # so the serialized key order matches the generated original.
    _SCALAR_FIELDS = (
        ('corp_id', 'CorpId'),
        ('create_time', 'CreateTime'),
        ('datasource_type', 'DatasourceType'),
        ('device_address', 'DeviceAddress'),
        ('device_id', 'DeviceId'),
        ('device_model', 'DeviceModel'),
        ('device_name', 'DeviceName'),
        ('device_sn', 'DeviceSn'),
        ('device_status', 'DeviceStatus'),
        ('device_type', 'DeviceType'),
        ('in_protocol', 'InProtocol'),
        ('latitude', 'Latitude'),
        ('longitude', 'Longitude'),
        ('modify_time', 'ModifyTime'),
        ('password', 'Password'),
        ('server_id', 'ServerId'),
        ('server_ip', 'ServerIp'),
        ('server_port', 'ServerPort'),
        ('server_realm', 'ServerRealm'),
        ('sub_device_count', 'SubDeviceCount'),
    )

    def __init__(
        self,
        corp_id: str = None,
        create_time: str = None,
        datasource_type: str = None,
        device_address: str = None,
        device_id: str = None,
        device_model: str = None,
        device_name: str = None,
        device_sn: str = None,
        device_status: str = None,
        device_type: str = None,
        in_protocol: str = None,
        latitude: str = None,
        longitude: str = None,
        modify_time: str = None,
        password: str = None,
        server_id: str = None,
        server_ip: str = None,
        server_port: str = None,
        server_realm: str = None,
        sub_device_count: str = None,
        sub_device_info: List[DescribeNvrsResponseBodyDataRecordsSubDeviceInfo] = None,
        vendor: str = None,
    ):
        self.corp_id = corp_id
        self.create_time = create_time
        self.datasource_type = datasource_type
        self.device_address = device_address
        self.device_id = device_id
        self.device_model = device_model
        self.device_name = device_name
        self.device_sn = device_sn
        self.device_status = device_status
        self.device_type = device_type
        self.in_protocol = in_protocol
        self.latitude = latitude
        self.longitude = longitude
        self.modify_time = modify_time
        self.password = password
        self.server_id = server_id
        self.server_ip = server_ip
        self.server_port = server_port
        self.server_realm = server_realm
        self.sub_device_count = sub_device_count
        self.sub_device_info = sub_device_info
        self.vendor = vendor

    def validate(self):
        """Recursively validate each nested sub-device entry."""
        for info in self.sub_device_info or []:
            if info:
                info.validate()

    def to_map(self):
        """Serialize to a wire-format dict; 'SubDeviceInfo' is always present."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._SCALAR_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        result['SubDeviceInfo'] = [
            info.to_map() if info else None for info in (self.sub_device_info or [])
        ]
        if self.vendor is not None:
            result['Vendor'] = self.vendor
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._SCALAR_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        raw_infos = m.get('SubDeviceInfo')
        self.sub_device_info = (
            [DescribeNvrsResponseBodyDataRecordsSubDeviceInfo().from_map(i) for i in raw_infos]
            if raw_infos is not None
            else []
        )
        if m.get('Vendor') is not None:
            self.vendor = m.get('Vendor')
        return self
class DescribeNvrsResponseBodyData(TeaModel):
    """Paged container of DescribeNvrs records."""

    def __init__(
        self,
        page_num: int = None,
        page_size: int = None,
        records: List[DescribeNvrsResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_num = page_num
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        """Recursively validate each contained record."""
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize to a wire-format dict; 'Records' is always present."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.page_num is not None:
            result['PageNum'] = self.page_num
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        result['Records'] = [r.to_map() if r else None for r in (self.records or [])]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.total_page is not None:
            result['TotalPage'] = self.total_page
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('PageNum') is not None:
            self.page_num = m.get('PageNum')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        raw_records = m.get('Records')
        self.records = (
            [DescribeNvrsResponseBodyDataRecords().from_map(r) for r in raw_records]
            if raw_records is not None
            else []
        )
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        return self
class DescribeNvrsResponseBody(TeaModel):
    """Top-level response body for DescribeNvrs."""

    def __init__(
        self,
        code: str = None,
        data: DescribeNvrsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Recursively validate the nested data payload."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        for key, value in (('Message', self.message), ('RequestId', self.request_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = DescribeNvrsResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class DescribeNvrsResponse(TeaModel):
    """HTTP-level wrapper: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeNvrsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        for value, name in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {
            key: value
            for key, value in (('headers', self.headers), ('statusCode', self.status_code))
            if value is not None
        }
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeNvrsResponseBody().from_map(m['body'])
        return self
class DescribeSearchItemsRequest(TeaModel):
    """Request model for DescribeSearchItems: paging plus item/table filters."""

    # (python attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('search_item_ids', 'SearchItemIds'),
        ('search_table_id', 'SearchTableId'),
    )

    def __init__(
        self,
        page_number: str = None,
        page_size: str = None,
        search_item_ids: str = None,
        search_table_id: str = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.search_item_ids = search_item_ids
        self.search_table_id = search_table_id

    def validate(self):
        """No field constraints for this request model."""
        pass

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeSearchItemsResponseBodyDataRecords(TeaModel):
    """One search-item record returned by DescribeSearchItems."""

    # (python attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('item_image_url', 'ItemImageUrl'),
        ('search_item_id', 'SearchItemId'),
        ('search_item_name', 'SearchItemName'),
    )

    def __init__(
        self,
        item_image_url: str = None,
        search_item_id: str = None,
        search_item_name: str = None,
    ):
        self.item_image_url = item_image_url
        self.search_item_id = search_item_id
        self.search_item_name = search_item_name

    def validate(self):
        """No field constraints for this model."""
        pass

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeSearchItemsResponseBodyData(TeaModel):
    """Paged container of DescribeSearchItems records."""

    def __init__(
        self,
        page_number: str = None,
        page_size: str = None,
        records: List[DescribeSearchItemsResponseBodyDataRecords] = None,
        total_count: str = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count

    def validate(self):
        """Recursively validate each contained record."""
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize to a wire-format dict; 'Records' is always present."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.page_number is not None:
            result['PageNumber'] = self.page_number
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        result['Records'] = [r.to_map() if r else None for r in (self.records or [])]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        raw_records = m.get('Records')
        self.records = (
            [DescribeSearchItemsResponseBodyDataRecords().from_map(r) for r in raw_records]
            if raw_records is not None
            else []
        )
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class DescribeSearchItemsResponseBody(TeaModel):
    """Top-level response body for DescribeSearchItems."""

    def __init__(
        self,
        code: str = None,
        data: DescribeSearchItemsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.success = success

    def validate(self):
        """Recursively validate the nested data payload."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        for key, value in (
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = DescribeSearchItemsResponseBodyData().from_map(m['Data'])
        for attr, key in (
            ('message', 'Message'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeSearchItemsResponse(TeaModel):
    """HTTP-level wrapper: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeSearchItemsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        for value, name in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {
            key: value
            for key, value in (('headers', self.headers), ('statusCode', self.status_code))
            if value is not None
        }
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeSearchItemsResponseBody().from_map(m['body'])
        return self
class DescribeSearchTablesRequest(TeaModel):
    """Request model for DescribeSearchTables: paging plus table-id filter."""

    # (python attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('search_table_ids', 'SearchTableIds'),
    )

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        search_table_ids: str = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.search_table_ids = search_table_ids

    def validate(self):
        """No field constraints for this request model."""
        pass

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeSearchTablesResponseBodyDataRecords(TeaModel):
    """One search-table record returned by DescribeSearchTables."""

    # (python attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('algorithm_id', 'AlgorithmId'),
        ('search_table_id', 'SearchTableId'),
        ('search_table_name', 'SearchTableName'),
        ('target_type', 'TargetType'),
    )

    def __init__(
        self,
        algorithm_id: str = None,
        search_table_id: str = None,
        search_table_name: str = None,
        target_type: str = None,
    ):
        self.algorithm_id = algorithm_id
        self.search_table_id = search_table_id
        self.search_table_name = search_table_name
        self.target_type = target_type

    def validate(self):
        """No field constraints for this model."""
        pass

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeSearchTablesResponseBodyData(TeaModel):
    """Paged container of DescribeSearchTables records."""

    def __init__(
        self,
        page_numbei: int = None,
        page_size: int = None,
        records: List[DescribeSearchTablesResponseBodyDataRecords] = None,
        total_count: int = None,
    ):
        # NOTE(review): 'page_numbei' / wire key 'PageNumbei' looks like an
        # upstream typo for 'PageNumber', but it is the key the service
        # actually emits, so both names must be kept as-is.
        self.page_numbei = page_numbei
        self.page_size = page_size
        self.records = records
        self.total_count = total_count

    def validate(self):
        """Recursively validate each contained record."""
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize to a wire-format dict; 'Records' is always present."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.page_numbei is not None:
            result['PageNumbei'] = self.page_numbei
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        result['Records'] = [r.to_map() if r else None for r in (self.records or [])]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('PageNumbei') is not None:
            self.page_numbei = m.get('PageNumbei')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        raw_records = m.get('Records')
        self.records = (
            [DescribeSearchTablesResponseBodyDataRecords().from_map(r) for r in raw_records]
            if raw_records is not None
            else []
        )
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class DescribeSearchTablesResponseBody(TeaModel):
    """Top-level response body for DescribeSearchTables."""

    def __init__(
        self,
        code: str = None,
        data: DescribeSearchTablesResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.success = success

    def validate(self):
        """Recursively validate the nested data payload."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        for key, value in (
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = DescribeSearchTablesResponseBodyData().from_map(m['Data'])
        for attr, key in (
            ('message', 'Message'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeSearchTablesResponse(TeaModel):
    """HTTP-level wrapper: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeSearchTablesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        for value, name in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {
            key: value
            for key, value in (('headers', self.headers), ('statusCode', self.status_code))
            if value is not None
        }
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeSearchTablesResponseBody().from_map(m['body'])
        return self
class DescribeWatchItemsRequest(TeaModel):
    """Request model for DescribeWatchItems: paging plus item/policy filters."""

    # (python attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('watch_item_ids', 'WatchItemIds'),
        ('watch_policy_id', 'WatchPolicyId'),
    )

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        watch_item_ids: str = None,
        watch_policy_id: str = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.watch_item_ids = watch_item_ids
        self.watch_policy_id = watch_policy_id

    def validate(self):
        """No field constraints for this request model."""
        pass

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWatchItemsResponseBodyDataRecords(TeaModel):
    """One watch-item record returned by DescribeWatchItems."""

    # (python attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('item_attributes', 'ItemAttributes'),
        ('item_image_url', 'ItemImageUrl'),
        ('watch_item_id', 'WatchItemId'),
        ('watch_item_name', 'WatchItemName'),
    )

    def __init__(
        self,
        item_attributes: str = None,
        item_image_url: str = None,
        watch_item_id: str = None,
        watch_item_name: str = None,
    ):
        self.item_attributes = item_attributes
        self.item_image_url = item_image_url
        self.watch_item_id = watch_item_id
        self.watch_item_name = watch_item_name

    def validate(self):
        """No field constraints for this model."""
        pass

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWatchItemsResponseBodyData(TeaModel):
    """Paged container of DescribeWatchItems records."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[DescribeWatchItemsResponseBodyDataRecords] = None,
        total_count: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count

    def validate(self):
        """Recursively validate each contained record."""
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize to a wire-format dict; 'Records' is always present."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.page_number is not None:
            result['PageNumber'] = self.page_number
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        result['Records'] = [r.to_map() if r else None for r in (self.records or [])]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        raw_records = m.get('Records')
        self.records = (
            [DescribeWatchItemsResponseBodyDataRecords().from_map(r) for r in raw_records]
            if raw_records is not None
            else []
        )
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class DescribeWatchItemsResponseBody(TeaModel):
    """Top-level response body for DescribeWatchItems."""

    def __init__(
        self,
        code: str = None,
        data: DescribeWatchItemsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.success = success

    def validate(self):
        """Recursively validate the nested data payload."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize the set (non-None) fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        for key, value in (
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate fields from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = DescribeWatchItemsResponseBodyData().from_map(m['Data'])
        for attr, key in (
            ('message', 'Message'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWatchItemsResponse(TeaModel):
    """HTTP-level wrapper for DescribeWatchItems: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeWatchItemsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three wrapper fields are mandatory.
        for value, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeWatchItemsResponseBody().from_map(m['body'])
        return self
class DescribeWatchPoliciesRequest(TeaModel):
    """Request parameters for the DescribeWatchPolicies API (paging + policy-id filter)."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        watch_policy_ids: str = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.watch_policy_ids = watch_policy_ids

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('PageNumber', self.page_number),
            ('PageSize', self.page_size),
            ('WatchPolicyIds', self.watch_policy_ids),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('PageNumber', 'page_number'),
            ('PageSize', 'page_size'),
            ('WatchPolicyIds', 'watch_policy_ids'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWatchPoliciesResponseBodyDataRecords(TeaModel):
    """A single watch-policy record returned by DescribeWatchPolicies."""

    def __init__(
        self,
        item_match_type: str = None,
        similarity_threshold: float = None,
        target_type: str = None,
        watch_mode: str = None,
        watch_policy_id: str = None,
        watch_policy_name: str = None,
    ):
        self.item_match_type = item_match_type
        self.similarity_threshold = similarity_threshold
        self.target_type = target_type
        self.watch_mode = watch_mode
        self.watch_policy_id = watch_policy_id
        self.watch_policy_name = watch_policy_name

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('ItemMatchType', self.item_match_type),
            ('SimilarityThreshold', self.similarity_threshold),
            ('TargetType', self.target_type),
            ('WatchMode', self.watch_mode),
            ('WatchPolicyId', self.watch_policy_id),
            ('WatchPolicyName', self.watch_policy_name),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('ItemMatchType', 'item_match_type'),
            ('SimilarityThreshold', 'similarity_threshold'),
            ('TargetType', 'target_type'),
            ('WatchMode', 'watch_mode'),
            ('WatchPolicyId', 'watch_policy_id'),
            ('WatchPolicyName', 'watch_policy_name'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWatchPoliciesResponseBodyData(TeaModel):
    """Paged container of watch-policy records."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[DescribeWatchPoliciesResponseBodyDataRecords] = None,
        total_count: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count

    def validate(self):
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {key: value for key, value in (
            ('PageNumber', self.page_number),
            ('PageSize', self.page_size),
        ) if value is not None}
        # 'Records' is always present, even when empty.
        result['Records'] = [
            record.to_map() if record else None
            for record in (self.records or [])
        ]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        # Records are reset before being repopulated from the map.
        self.records = [
            DescribeWatchPoliciesResponseBodyDataRecords().from_map(item)
            for item in (m.get('Records') or [])
        ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class DescribeWatchPoliciesResponseBody(TeaModel):
    """Response body of DescribeWatchPolicies: status fields plus the paged data payload."""

    def __init__(
        self,
        code: str = None,
        data: DescribeWatchPoliciesResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # ID of the request.
        self.request_id = request_id
        self.success = success

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
            ('Success', 'success'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('Data') is not None:
            self.data = DescribeWatchPoliciesResponseBodyData().from_map(m['Data'])
        return self
class DescribeWatchPoliciesResponse(TeaModel):
    """HTTP-level wrapper for DescribeWatchPolicies: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeWatchPoliciesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three wrapper fields are mandatory.
        for value, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeWatchPoliciesResponseBody().from_map(m['body'])
        return self
class DescribeWatchTasksRequest(TeaModel):
    """Request parameters for the DescribeWatchTasks API (paging + task-id filter)."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        watch_task_ids: str = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.watch_task_ids = watch_task_ids

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('PageNumber', self.page_number),
            ('PageSize', self.page_size),
            ('WatchTaskIds', self.watch_task_ids),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('PageNumber', 'page_number'),
            ('PageSize', 'page_size'),
            ('WatchTaskIds', 'watch_task_ids'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWatchTasksResponseBodyDataRecords(TeaModel):
    """A single watch-task record returned by DescribeWatchTasks."""

    def __init__(
        self,
        corp_id: str = None,
        description: str = None,
        device_list: str = None,
        message_receiver: str = None,
        schedule_cycle_dates: str = None,
        schedule_times: str = None,
        schedule_type: str = None,
        task_name: str = None,
        watch_policy_ids: str = None,
        watch_task_id: str = None,
    ):
        self.corp_id = corp_id
        self.description = description
        self.device_list = device_list
        self.message_receiver = message_receiver
        self.schedule_cycle_dates = schedule_cycle_dates
        self.schedule_times = schedule_times
        self.schedule_type = schedule_type
        self.task_name = task_name
        self.watch_policy_ids = watch_policy_ids
        self.watch_task_id = watch_task_id

    # Snake-case attribute -> wire key, in emit order.
    _PAIRS = (
        ('corp_id', 'CorpId'),
        ('description', 'Description'),
        ('device_list', 'DeviceList'),
        ('message_receiver', 'MessageReceiver'),
        ('schedule_cycle_dates', 'ScheduleCycleDates'),
        ('schedule_times', 'ScheduleTimes'),
        ('schedule_type', 'ScheduleType'),
        ('task_name', 'TaskName'),
        ('watch_policy_ids', 'WatchPolicyIds'),
        ('watch_task_id', 'WatchTaskId'),
    )

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribeWatchTasksResponseBodyData(TeaModel):
    """Paged container of watch-task records."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[DescribeWatchTasksResponseBodyDataRecords] = None,
        total_count: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count

    def validate(self):
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {key: value for key, value in (
            ('PageNumber', self.page_number),
            ('PageSize', self.page_size),
        ) if value is not None}
        # 'Records' is always present, even when empty.
        result['Records'] = [
            record.to_map() if record else None
            for record in (self.records or [])
        ]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        # Records are reset before being repopulated from the map.
        self.records = [
            DescribeWatchTasksResponseBodyDataRecords().from_map(item)
            for item in (m.get('Records') or [])
        ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class DescribeWatchTasksResponseBody(TeaModel):
    """Response body of DescribeWatchTasks: status fields plus the paged data payload."""

    def __init__(
        self,
        code: str = None,
        data: DescribeWatchTasksResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # ID of the request.
        self.request_id = request_id
        self.success = success

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
            ('Success', 'success'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('Data') is not None:
            self.data = DescribeWatchTasksResponseBodyData().from_map(m['Data'])
        return self
class DescribeWatchTasksResponse(TeaModel):
    """HTTP-level wrapper for DescribeWatchTasks: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribeWatchTasksResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three wrapper fields are mandatory.
        for value, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribeWatchTasksResponseBody().from_map(m['body'])
        return self
class DescribesDoubleVerificationGroupsRequest(TeaModel):
    """Request parameters for the DescribesDoubleVerificationGroups API."""

    def __init__(
        self,
        double_verification_group_ids: str = None,
        id: str = None,
        page_num: int = None,
        page_size: int = None,
    ):
        self.double_verification_group_ids = double_verification_group_ids
        self.id = id
        self.page_num = page_num
        self.page_size = page_size

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('DoubleVerificationGroupIds', self.double_verification_group_ids),
            ('Id', self.id),
            ('PageNum', self.page_num),
            ('PageSize', self.page_size),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('DoubleVerificationGroupIds', 'double_verification_group_ids'),
            ('Id', 'id'),
            ('PageNum', 'page_num'),
            ('PageSize', 'page_size'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribesDoubleVerificationGroupsResponseBodyDoubleVerificationGroupsDoubleVerificationGroupListPersonIdList(TeaModel):
    """Person reference inside a double-verification group (person id + its table id)."""

    def __init__(
        self,
        person_id: str = None,
        person_table_id: str = None,
    ):
        self.person_id = person_id
        self.person_table_id = person_table_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('PersonId', self.person_id),
            ('PersonTableId', self.person_table_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('PersonId', 'person_id'),
            ('PersonTableId', 'person_table_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribesDoubleVerificationGroupsResponseBodyDoubleVerificationGroupsDoubleVerificationGroupList(TeaModel):
    """One double-verification group with its device, settings and member list."""

    def __init__(
        self,
        device_id: str = None,
        enabled: str = None,
        group_id: str = None,
        interval: int = None,
        last_change: str = None,
        member_number: int = None,
        person_id_list: List[DescribesDoubleVerificationGroupsResponseBodyDoubleVerificationGroupsDoubleVerificationGroupListPersonIdList] = None,
    ):
        self.device_id = device_id
        self.enabled = enabled
        self.group_id = group_id
        self.interval = interval
        self.last_change = last_change
        self.member_number = member_number
        self.person_id_list = person_id_list

    def validate(self):
        for person in self.person_id_list or []:
            if person:
                person.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {key: value for key, value in (
            ('DeviceId', self.device_id),
            ('Enabled', self.enabled),
            ('GroupId', self.group_id),
            ('Interval', self.interval),
            ('LastChange', self.last_change),
            ('MemberNumber', self.member_number),
        ) if value is not None}
        # 'PersonIdList' is always present, even when empty.
        result['PersonIdList'] = [
            person.to_map() if person else None
            for person in (self.person_id_list or [])
        ]
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('DeviceId', 'device_id'),
            ('Enabled', 'enabled'),
            ('GroupId', 'group_id'),
            ('Interval', 'interval'),
            ('LastChange', 'last_change'),
            ('MemberNumber', 'member_number'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        # Member list is reset before being repopulated from the map.
        self.person_id_list = [
            DescribesDoubleVerificationGroupsResponseBodyDoubleVerificationGroupsDoubleVerificationGroupListPersonIdList().from_map(item)
            for item in (m.get('PersonIdList') or [])
        ]
        return self
class DescribesDoubleVerificationGroupsResponseBodyDoubleVerificationGroups(TeaModel):
    """Paged container of double-verification groups."""

    def __init__(
        self,
        double_verification_group_list: List[DescribesDoubleVerificationGroupsResponseBodyDoubleVerificationGroupsDoubleVerificationGroupList] = None,
        page_num: int = None,
        page_size: int = None,
        total_num: int = None,
    ):
        self.double_verification_group_list = double_verification_group_list
        self.page_num = page_num
        self.page_size = page_size
        self.total_num = total_num

    def validate(self):
        for group in self.double_verification_group_list or []:
            if group:
                group.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        # The group list is always emitted first, even when empty.
        result = {
            'DoubleVerificationGroupList': [
                group.to_map() if group else None
                for group in (self.double_verification_group_list or [])
            ]
        }
        for key, value in (
            ('PageNum', self.page_num),
            ('PageSize', self.page_size),
            ('TotalNum', self.total_num),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        # Group list is reset before being repopulated from the map.
        self.double_verification_group_list = [
            DescribesDoubleVerificationGroupsResponseBodyDoubleVerificationGroupsDoubleVerificationGroupList().from_map(item)
            for item in (m.get('DoubleVerificationGroupList') or [])
        ]
        for key, attr in (
            ('PageNum', 'page_num'),
            ('PageSize', 'page_size'),
            ('TotalNum', 'total_num'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class DescribesDoubleVerificationGroupsResponseBody(TeaModel):
    """Response body of DescribesDoubleVerificationGroups: status fields plus group data."""

    def __init__(
        self,
        code: str = None,
        double_verification_groups: DescribesDoubleVerificationGroupsResponseBodyDoubleVerificationGroups = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.double_verification_groups = double_verification_groups
        self.message = message
        # ID of the request.
        self.request_id = request_id

    def validate(self):
        if self.double_verification_groups:
            self.double_verification_groups.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        groups = self.double_verification_groups
        pairs = (
            ('Code', self.code),
            ('DoubleVerificationGroups', groups.to_map() if groups is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('DoubleVerificationGroups') is not None:
            self.double_verification_groups = (
                DescribesDoubleVerificationGroupsResponseBodyDoubleVerificationGroups()
                .from_map(m['DoubleVerificationGroups'])
            )
        return self
class DescribesDoubleVerificationGroupsResponse(TeaModel):
    """HTTP-level wrapper for DescribesDoubleVerificationGroups."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: DescribesDoubleVerificationGroupsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three wrapper fields are mandatory.
        for value, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = DescribesDoubleVerificationGroupsResponseBody().from_map(m['body'])
        return self
class EchoStatusResponseBody(TeaModel):
    """Response body of EchoStatus: carries only the request identifier."""

    def __init__(
        self,
        request_id: str = None,
    ):
        # ID of the request.
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {} if self.request_id is None else {'RequestId': self.request_id}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class EchoStatusResponse(TeaModel):
    """HTTP-level wrapper for EchoStatus: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: EchoStatusResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three wrapper fields are mandatory.
        for value, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = EchoStatusResponseBody().from_map(m['body'])
        return self
class GetAiotStorageInfoResponseBodyAiotStorageInfoEventAlarmMq(TeaModel):
    """Message-queue settings for event/alarm delivery in the AIoT storage info."""

    def __init__(
        self,
        alarm_topic: str = None,
        event_topic: str = None,
        instance_id: str = None,
        mq_type: str = None,
        ram_arn_role: str = None,
        region_id: str = None,
    ):
        self.alarm_topic = alarm_topic
        self.event_topic = event_topic
        self.instance_id = instance_id
        self.mq_type = mq_type
        self.ram_arn_role = ram_arn_role
        self.region_id = region_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('AlarmTopic', self.alarm_topic),
            ('EventTopic', self.event_topic),
            ('InstanceId', self.instance_id),
            ('MqType', self.mq_type),
            ('RamArnRole', self.ram_arn_role),
            ('RegionId', self.region_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('AlarmTopic', 'alarm_topic'),
            ('EventTopic', 'event_topic'),
            ('InstanceId', 'instance_id'),
            ('MqType', 'mq_type'),
            ('RamArnRole', 'ram_arn_role'),
            ('RegionId', 'region_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetAiotStorageInfoResponseBodyAiotStorageInfoEventAlarmPictureStorage(TeaModel):
    """Picture-storage settings for event/alarm data in the AIoT storage info."""

    def __init__(
        self,
        bucket: str = None,
        endpoint: str = None,
        path: str = None,
        proxy: str = None,
        ram_arn_role: str = None,
        storage_type: str = None,
    ):
        self.bucket = bucket
        self.endpoint = endpoint
        self.path = path
        self.proxy = proxy
        self.ram_arn_role = ram_arn_role
        self.storage_type = storage_type

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('Bucket', self.bucket),
            ('Endpoint', self.endpoint),
            ('Path', self.path),
            ('Proxy', self.proxy),
            ('RamArnRole', self.ram_arn_role),
            ('StorageType', self.storage_type),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('Bucket', 'bucket'),
            ('Endpoint', 'endpoint'),
            ('Path', 'path'),
            ('Proxy', 'proxy'),
            ('RamArnRole', 'ram_arn_role'),
            ('StorageType', 'storage_type'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetAiotStorageInfoResponseBodyAiotStorageInfo(TeaModel):
    """Aggregates the MQ and picture-storage sub-configurations of AIoT storage."""

    def __init__(
        self,
        event_alarm_mq: GetAiotStorageInfoResponseBodyAiotStorageInfoEventAlarmMq = None,
        event_alarm_picture_storage: GetAiotStorageInfoResponseBodyAiotStorageInfoEventAlarmPictureStorage = None,
    ):
        self.event_alarm_mq = event_alarm_mq
        self.event_alarm_picture_storage = event_alarm_picture_storage

    def validate(self):
        for nested in (self.event_alarm_mq, self.event_alarm_picture_storage):
            if nested:
                nested.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mq = self.event_alarm_mq
        storage = self.event_alarm_picture_storage
        pairs = (
            ('EventAlarmMq', mq.to_map() if mq is not None else None),
            ('EventAlarmPictureStorage', storage.to_map() if storage is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('EventAlarmMq') is not None:
            self.event_alarm_mq = (
                GetAiotStorageInfoResponseBodyAiotStorageInfoEventAlarmMq()
                .from_map(m['EventAlarmMq'])
            )
        if m.get('EventAlarmPictureStorage') is not None:
            self.event_alarm_picture_storage = (
                GetAiotStorageInfoResponseBodyAiotStorageInfoEventAlarmPictureStorage()
                .from_map(m['EventAlarmPictureStorage'])
            )
        return self
class GetAiotStorageInfoResponseBody(TeaModel):
    """Response body of GetAiotStorageInfo: storage info plus status fields."""

    def __init__(
        self,
        aiot_storage_info: GetAiotStorageInfoResponseBodyAiotStorageInfo = None,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.aiot_storage_info = aiot_storage_info
        self.code = code
        self.message = message
        # ID of the request.
        self.request_id = request_id

    def validate(self):
        if self.aiot_storage_info:
            self.aiot_storage_info.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        info = self.aiot_storage_info
        pairs = (
            ('AiotStorageInfo', info.to_map() if info is not None else None),
            ('Code', self.code),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('AiotStorageInfo') is not None:
            self.aiot_storage_info = (
                GetAiotStorageInfoResponseBodyAiotStorageInfo()
                .from_map(m['AiotStorageInfo'])
            )
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetAiotStorageInfoResponse(TeaModel):
    """HTTP-level wrapper for GetAiotStorageInfo: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetAiotStorageInfoResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three wrapper fields are mandatory.
        for value, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetAiotStorageInfoResponseBody().from_map(m['body'])
        return self
class GetBodyOptionsRequest(TeaModel):
    """Request parameters for the GetBodyOptions API (corporation id only)."""

    def __init__(
        self,
        corp_id: str = None,
    ):
        self.corp_id = corp_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {} if self.corp_id is None else {'CorpId': self.corp_id}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('CorpId') is not None:
            self.corp_id = m.get('CorpId')
        return self
class GetBodyOptionsResponseBodyDataOptionList(TeaModel):
    """A single selectable option (key/name pair) inside a body-options group."""

    def __init__(
        self,
        key: str = None,
        name: str = None,
    ):
        self.key = key
        self.name = name

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('Key', self.key),
            ('Name', self.name),
        )
        return {wire_key: value for wire_key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr in (
            ('Key', 'key'),
            ('Name', 'name'),
        ):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetBodyOptionsResponseBodyData(TeaModel):
    """A body-options group: key/name plus its list of selectable options."""

    def __init__(
        self,
        key: str = None,
        name: str = None,
        option_list: List[GetBodyOptionsResponseBodyDataOptionList] = None,
    ):
        self.key = key
        self.name = name
        self.option_list = option_list

    def validate(self):
        for option in self.option_list or []:
            if option:
                option.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {wire_key: value for wire_key, value in (
            ('Key', self.key),
            ('Name', self.name),
        ) if value is not None}
        # 'OptionList' is always present, even when empty.
        result['OptionList'] = [
            option.to_map() if option else None
            for option in (self.option_list or [])
        ]
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Key') is not None:
            self.key = m.get('Key')
        if m.get('Name') is not None:
            self.name = m.get('Name')
        # Option list is reset before being repopulated from the map.
        self.option_list = [
            GetBodyOptionsResponseBodyDataOptionList().from_map(item)
            for item in (m.get('OptionList') or [])
        ]
        return self
class GetBodyOptionsResponseBody(TeaModel):
    """Envelope of the GetBodyOptions response: status fields plus data list."""

    def __init__(
        self,
        code: str = None,
        data: List[GetBodyOptionsResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        for entry in self.data or []:
            if entry:
                entry.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        # 'Data' is always emitted, even when the list is unset.
        result['Data'] = [
            entry.to_map() if entry else None for entry in (self.data or [])
        ]
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        self.data = []
        if m.get('Data') is not None:
            self.data = [
                GetBodyOptionsResponseBodyData().from_map(item)
                for item in m.get('Data')
            ]
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetBodyOptionsResponse(TeaModel):
    """Complete GetBodyOptions response: HTTP headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetBodyOptionsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a completed response.
        for field_name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, field_name), field_name)
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetBodyOptionsResponseBody().from_map(m['body'])
        return self
class GetCatalogListRequest(TeaModel):
    """Request parameters for the GetCatalogList API."""

    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
    ):
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in (('corp_id', 'CorpId'), ('isv_sub_id', 'IsvSubId')):
            value = getattr(self, attr)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, wire_key in (('corp_id', 'CorpId'), ('isv_sub_id', 'IsvSubId')):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetCatalogListResponseBodyData(TeaModel):
    """One catalog entry: identity, parent link and profile count."""

    # (attribute, wire key) pairs, in wire order.
    _FIELD_SPEC = (
        ('catalog_id', 'CatalogId'),
        ('catalog_name', 'CatalogName'),
        ('isv_sub_id', 'IsvSubId'),
        ('parent_catalog_id', 'ParentCatalogId'),
        ('profile_count', 'ProfileCount'),
    )

    def __init__(
        self,
        catalog_id: int = None,
        catalog_name: str = None,
        isv_sub_id: str = None,
        parent_catalog_id: int = None,
        profile_count: int = None,
    ):
        self.catalog_id = catalog_id
        self.catalog_name = catalog_name
        self.isv_sub_id = isv_sub_id
        self.parent_catalog_id = parent_catalog_id
        self.profile_count = profile_count

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in self._FIELD_SPEC:
            value = getattr(self, attr)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, wire_key in self._FIELD_SPEC:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetCatalogListResponseBody(TeaModel):
    """Envelope of the GetCatalogList response: status fields plus data list."""

    def __init__(
        self,
        code: str = None,
        data: List[GetCatalogListResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        for entry in self.data or []:
            if entry:
                entry.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        # 'Data' is always emitted, even when the list is unset.
        result['Data'] = [
            entry.to_map() if entry else None for entry in (self.data or [])
        ]
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        self.data = []
        if m.get('Data') is not None:
            self.data = [
                GetCatalogListResponseBodyData().from_map(item)
                for item in m.get('Data')
            ]
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetCatalogListResponse(TeaModel):
    """Complete GetCatalogList response: HTTP headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetCatalogListResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a completed response.
        for field_name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, field_name), field_name)
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetCatalogListResponseBody().from_map(m['body'])
        return self
class GetCityCodeResponseBodyData(TeaModel):
    """A city entry: its address text and its administrative city code."""

    def __init__(
        self,
        city_address: str = None,
        city_code: str = None,
    ):
        self.city_address = city_address
        self.city_code = city_code

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in (('city_address', 'CityAddress'),
                               ('city_code', 'CityCode')):
            value = getattr(self, attr)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, wire_key in (('city_address', 'CityAddress'),
                               ('city_code', 'CityCode')):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetCityCodeResponseBody(TeaModel):
    """Envelope of the GetCityCode response: status fields plus data list."""

    def __init__(
        self,
        code: str = None,
        data: List[GetCityCodeResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        for entry in self.data or []:
            if entry:
                entry.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        # 'Data' is always emitted, even when the list is unset.
        result['Data'] = [
            entry.to_map() if entry else None for entry in (self.data or [])
        ]
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        self.data = []
        if m.get('Data') is not None:
            self.data = [
                GetCityCodeResponseBodyData().from_map(item)
                for item in m.get('Data')
            ]
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetCityCodeResponse(TeaModel):
    """Complete GetCityCode response: HTTP headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetCityCodeResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a completed response.
        for field_name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, field_name), field_name)
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetCityCodeResponseBody().from_map(m['body'])
        return self
class GetDataSourceStatsRequest(TeaModel):
    """Request parameters for the GetDataSourceStats API."""

    def __init__(
        self,
        corp_id_list: str = None,
        data_source_type: str = None,
    ):
        self.corp_id_list = corp_id_list
        self.data_source_type = data_source_type

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in (('corp_id_list', 'CorpIdList'),
                               ('data_source_type', 'DataSourceType')):
            value = getattr(self, attr)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, wire_key in (('corp_id_list', 'CorpIdList'),
                               ('data_source_type', 'DataSourceType')):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetDataSourceStatsResponseBodyData(TeaModel):
    """Statistics for one data-source type: item breakdown and total count."""

    def __init__(
        self,
        data_source_type: str = None,
        items: Dict[str, Any] = None,
        total: int = None,
    ):
        self.data_source_type = data_source_type
        # Free-form per-item stats; passed through as an opaque mapping.
        self.items = items
        self.total = total

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in (('data_source_type', 'DataSourceType'),
                               ('items', 'Items'),
                               ('total', 'Total')):
            value = getattr(self, attr)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, wire_key in (('data_source_type', 'DataSourceType'),
                               ('items', 'Items'),
                               ('total', 'Total')):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetDataSourceStatsResponseBody(TeaModel):
    """Envelope of the GetDataSourceStats response: status plus data list."""

    def __init__(
        self,
        code: str = None,
        data: List[GetDataSourceStatsResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        for entry in self.data or []:
            if entry:
                entry.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        # 'Data' is always emitted, even when the list is unset.
        result['Data'] = [
            entry.to_map() if entry else None for entry in (self.data or [])
        ]
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        self.data = []
        if m.get('Data') is not None:
            self.data = [
                GetDataSourceStatsResponseBodyData().from_map(item)
                for item in m.get('Data')
            ]
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetDataSourceStatsResponse(TeaModel):
    """Complete GetDataSourceStats response: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetDataSourceStatsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a completed response.
        for field_name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, field_name), field_name)
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetDataSourceStatsResponseBody().from_map(m['body'])
        return self
class GetDeviceCaptureStrategyRequest(TeaModel):
    """Request parameters for the GetDeviceCaptureStrategy API."""

    def __init__(
        self,
        device_code: str = None,
        device_type: str = None,
    ):
        # Device channel code.
        self.device_code = device_code
        # Device type.
        self.device_type = device_type

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in (('device_code', 'DeviceCode'),
                               ('device_type', 'DeviceType')):
            value = getattr(self, attr)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, wire_key in (('device_code', 'DeviceCode'),
                               ('device_type', 'DeviceType')):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetDeviceCaptureStrategyResponseBodyData(TeaModel):
    """Per-device capture strategy, one field per day of the week."""

    # (attribute, wire key) pairs, in wire order.
    _FIELD_SPEC = (
        ('device_code', 'DeviceCode'),
        ('device_type', 'DeviceType'),
        ('friday_capture_strategy', 'FridayCaptureStrategy'),
        ('monday_capture_strategy', 'MondayCaptureStrategy'),
        ('saturday_capture_strategy', 'SaturdayCaptureStrategy'),
        ('sunday_capture_strategy', 'SundayCaptureStrategy'),
        ('thursday_capture_strategy', 'ThursdayCaptureStrategy'),
        ('tuesday_capture_strategy', 'TuesdayCaptureStrategy'),
        ('wednesday_capture_strategy', 'WednesdayCaptureStrategy'),
    )

    def __init__(
        self,
        device_code: str = None,
        device_type: str = None,
        friday_capture_strategy: str = None,
        monday_capture_strategy: str = None,
        saturday_capture_strategy: str = None,
        sunday_capture_strategy: str = None,
        thursday_capture_strategy: str = None,
        tuesday_capture_strategy: str = None,
        wednesday_capture_strategy: str = None,
    ):
        # Device channel.
        self.device_code = device_code
        # Device type.
        self.device_type = device_type
        # Capture strategy for Friday.
        self.friday_capture_strategy = friday_capture_strategy
        # Capture strategy for Monday.
        self.monday_capture_strategy = monday_capture_strategy
        # Capture strategy for Saturday.
        self.saturday_capture_strategy = saturday_capture_strategy
        # Capture strategy for Sunday.
        self.sunday_capture_strategy = sunday_capture_strategy
        # Capture strategy for Thursday.
        self.thursday_capture_strategy = thursday_capture_strategy
        # Capture strategy for Tuesday.
        self.tuesday_capture_strategy = tuesday_capture_strategy
        # Capture strategy for Wednesday.
        self.wednesday_capture_strategy = wednesday_capture_strategy

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in self._FIELD_SPEC:
            value = getattr(self, attr)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, wire_key in self._FIELD_SPEC:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetDeviceCaptureStrategyResponseBody(TeaModel):
    """Envelope of GetDeviceCaptureStrategy: status plus a single data model."""

    def __init__(
        self,
        code: str = None,
        data: GetDeviceCaptureStrategyResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        # Error code.
        self.code = code
        # Response payload.
        self.data = data
        # Error message.
        self.message = message
        # Request identifier.
        self.request_id = request_id

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = GetDeviceCaptureStrategyResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetDeviceCaptureStrategyResponse(TeaModel):
    """Complete GetDeviceCaptureStrategy response: headers, status and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetDeviceCaptureStrategyResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a completed response.
        for field_name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, field_name), field_name)
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetDeviceCaptureStrategyResponseBody().from_map(m['body'])
        return self
class GetDeviceConfigRequest(TeaModel):
    """Request parameters for the GetDeviceConfig API."""

    def __init__(
        self,
        device_sn: str = None,
        device_time_stamp: str = None,
    ):
        self.device_sn = device_sn
        self.device_time_stamp = device_time_stamp

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in (('device_sn', 'DeviceSn'),
                               ('device_time_stamp', 'DeviceTimeStamp')):
            value = getattr(self, attr)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, wire_key in (('device_sn', 'DeviceSn'),
                               ('device_time_stamp', 'DeviceTimeStamp')):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetDeviceConfigResponseBodyChannelList(TeaModel):
    """Per-channel capture strategy: GB channel id plus one field per weekday."""

    # (attribute, wire key) pairs, in wire order.
    _FIELD_SPEC = (
        ('channel_gb_id', 'ChannelGbId'),
        ('friday_capture_strategy', 'FridayCaptureStrategy'),
        ('monday_capture_strategy', 'MondayCaptureStrategy'),
        ('saturday_capture_strategy', 'SaturdayCaptureStrategy'),
        ('sunday_capture_strategy', 'SundayCaptureStrategy'),
        ('thursday_capture_strategy', 'ThursdayCaptureStrategy'),
        ('tuesday_capture_strategy', 'TuesdayCaptureStrategy'),
        ('wednesday_capture_strategy', 'WednesdayCaptureStrategy'),
    )

    def __init__(
        self,
        channel_gb_id: str = None,
        friday_capture_strategy: str = None,
        monday_capture_strategy: str = None,
        saturday_capture_strategy: str = None,
        sunday_capture_strategy: str = None,
        thursday_capture_strategy: str = None,
        tuesday_capture_strategy: str = None,
        wednesday_capture_strategy: str = None,
    ):
        self.channel_gb_id = channel_gb_id
        self.friday_capture_strategy = friday_capture_strategy
        self.monday_capture_strategy = monday_capture_strategy
        self.saturday_capture_strategy = saturday_capture_strategy
        self.sunday_capture_strategy = sunday_capture_strategy
        self.thursday_capture_strategy = thursday_capture_strategy
        self.tuesday_capture_strategy = tuesday_capture_strategy
        self.wednesday_capture_strategy = wednesday_capture_strategy

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in self._FIELD_SPEC:
            value = getattr(self, attr)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, wire_key in self._FIELD_SPEC:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetDeviceConfigResponseBodyOSDList(TeaModel):
    """One OSD overlay entry: text plus its top-left anchor coordinates."""

    def __init__(
        self,
        left_top_x: str = None,
        left_top_y: str = None,
        text: str = None,
    ):
        self.left_top_x = left_top_x
        self.left_top_y = left_top_y
        self.text = text

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in (('left_top_x', 'LeftTopX'),
                               ('left_top_y', 'LeftTopY'),
                               ('text', 'Text')):
            value = getattr(self, attr)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, wire_key in (('left_top_x', 'LeftTopX'),
                               ('left_top_y', 'LeftTopY'),
                               ('text', 'Text')):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetDeviceConfigResponseBody(TeaModel):
    """Device configuration payload returned by GetDeviceConfig.

    A flat structure mixing encoder settings, server registration info,
    OSD overlay entries and per-channel capture strategies.
    """

    # Model-list fields: wire key -> (attribute, element model class).
    # The two list keys are always present in the serialized map.
    _LIST_FIELDS = {
        'ChannelList': ('channel_list', GetDeviceConfigResponseBodyChannelList),
        'OSDList': ('osdlist', GetDeviceConfigResponseBodyOSDList),
    }

    # Every (attribute, wire key) pair, in wire order; includes the list keys.
    _FIELD_SPEC = (
        ('audio_enable', 'AudioEnable'),
        ('audio_format', 'AudioFormat'),
        ('bit_rate', 'BitRate'),
        ('channel_list', 'ChannelList'),
        ('code', 'Code'),
        ('device_address', 'DeviceAddress'),
        ('device_id', 'DeviceId'),
        ('device_name', 'DeviceName'),
        ('encode_format', 'EncodeFormat'),
        ('frame_rate', 'FrameRate'),
        ('gov_length', 'GovLength'),
        ('latitude', 'Latitude'),
        ('longitude', 'Longitude'),
        ('message', 'Message'),
        ('osdlist', 'OSDList'),
        ('osdtime_enable', 'OSDTimeEnable'),
        ('osdtime_type', 'OSDTimeType'),
        ('osdtime_x', 'OSDTimeX'),
        ('osdtime_y', 'OSDTimeY'),
        ('pass_word', 'PassWord'),
        ('protocol', 'Protocol'),
        ('request_id', 'RequestId'),
        ('resolution', 'Resolution'),
        ('retry_interval', 'RetryInterval'),
        ('server_id', 'ServerId'),
        ('server_ip', 'ServerIp'),
        ('server_port', 'ServerPort'),
        ('user_name', 'UserName'),
    )

    def __init__(
        self,
        audio_enable: str = None,
        audio_format: str = None,
        bit_rate: str = None,
        channel_list: List[GetDeviceConfigResponseBodyChannelList] = None,
        code: str = None,
        device_address: str = None,
        device_id: str = None,
        device_name: str = None,
        encode_format: str = None,
        frame_rate: str = None,
        gov_length: int = None,
        latitude: str = None,
        longitude: str = None,
        message: str = None,
        osdlist: List[GetDeviceConfigResponseBodyOSDList] = None,
        osdtime_enable: str = None,
        osdtime_type: str = None,
        osdtime_x: str = None,
        osdtime_y: str = None,
        pass_word: str = None,
        protocol: str = None,
        request_id: str = None,
        resolution: str = None,
        retry_interval: str = None,
        server_id: str = None,
        server_ip: str = None,
        server_port: str = None,
        user_name: str = None,
    ):
        self.audio_enable = audio_enable
        self.audio_format = audio_format
        self.bit_rate = bit_rate
        self.channel_list = channel_list
        self.code = code
        self.device_address = device_address
        self.device_id = device_id
        self.device_name = device_name
        self.encode_format = encode_format
        self.frame_rate = frame_rate
        self.gov_length = gov_length
        self.latitude = latitude
        self.longitude = longitude
        self.message = message
        self.osdlist = osdlist
        self.osdtime_enable = osdtime_enable
        self.osdtime_type = osdtime_type
        self.osdtime_x = osdtime_x
        self.osdtime_y = osdtime_y
        self.pass_word = pass_word
        self.protocol = protocol
        # Id of the request
        self.request_id = request_id
        self.resolution = resolution
        self.retry_interval = retry_interval
        self.server_id = server_id
        self.server_ip = server_ip
        self.server_port = server_port
        self.user_name = user_name

    def validate(self):
        for channel in self.channel_list or []:
            if channel:
                channel.validate()
        for osd in self.osdlist or []:
            if osd:
                osd.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in self._FIELD_SPEC:
            value = getattr(self, attr)
            if wire_key in self._LIST_FIELDS:
                # List keys are always emitted, even when the list is unset.
                result[wire_key] = [
                    item.to_map() if item else None for item in (value or [])
                ]
            elif value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for wire_key, (attr, model_cls) in self._LIST_FIELDS.items():
            parsed = []
            if m.get(wire_key) is not None:
                parsed = [model_cls().from_map(item) for item in m.get(wire_key)]
            setattr(self, attr, parsed)
        for attr, wire_key in self._FIELD_SPEC:
            if wire_key in self._LIST_FIELDS:
                continue  # handled above
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetDeviceConfigResponse(TeaModel):
    """Complete GetDeviceConfig response: HTTP headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetDeviceConfigResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a completed response.
        for field_name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, field_name), field_name)
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetDeviceConfigResponseBody().from_map(m['body'])
        return self
class GetDeviceCountRequest(TeaModel):
    """Request parameters for the GetDeviceCount API."""

    def __init__(
        self,
        up_stream_mode: str = None,
    ):
        self.up_stream_mode = up_stream_mode

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in (('up_stream_mode', 'UpStreamMode'),):
            value = getattr(self, attr)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, wire_key in (('up_stream_mode', 'UpStreamMode'),):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetDeviceCountResponseBodyData(TeaModel):
    """Payload of GetDeviceCount: the device tally."""

    def __init__(
        self,
        device_count: int = None,
    ):
        self.device_count = device_count

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        result = {}
        for attr, wire_key in (('device_count', 'DeviceCount'),):
            value = getattr(self, attr)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        for attr, wire_key in (('device_count', 'DeviceCount'),):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetDeviceCountResponseBody(TeaModel):
    """GetDeviceCount response payload: result code/message plus the data section."""

    def __init__(
        self,
        code: str = None,
        data: GetDeviceCountResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Only the nested data model needs recursive validation.
        if self.data:
            self.data.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = GetDeviceCountResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetDeviceCountResponse(TeaModel):
    """Complete GetDeviceCount response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetDeviceCountResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetDeviceCountResponseBody().from_map(m['body'])
        return self
class GetDeviceLiveUrlRequest(TeaModel):
    """Request parameters for the GetDeviceLiveUrl API."""

    # Wire (API) key -> attribute name on this model.
    _KEY_MAP = {
        'CorpId': 'corp_id',
        'DeviceId': 'device_id',
        'GbId': 'gb_id',
        'OutProtocol': 'out_protocol',
        'StreamType': 'stream_type',
    }

    def __init__(
        self,
        corp_id: str = None,
        device_id: str = None,
        gb_id: str = None,
        out_protocol: str = None,
        stream_type: str = None,
    ):
        self.corp_id = corp_id
        self.device_id = device_id
        self.gb_id = gb_id
        self.out_protocol = out_protocol
        self.stream_type = stream_type

    def validate(self):
        # No required fields or nested models.
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # Serialize only attributes that were actually set.
        return {key: getattr(self, attr)
                for key, attr in self._KEY_MAP.items()
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._KEY_MAP.items():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetDeviceLiveUrlResponseBody(TeaModel):
    """GetDeviceLiveUrl response payload: result code/message and the live-stream URL."""

    # Wire (API) key -> attribute name on this model.
    _KEY_MAP = {
        'Code': 'code',
        'Message': 'message',
        'OutProtocol': 'out_protocol',
        'RequestId': 'request_id',
        'StreamType': 'stream_type',
        'Url': 'url',
    }

    def __init__(
        self,
        code: str = None,
        message: str = None,
        out_protocol: str = None,
        request_id: str = None,
        stream_type: int = None,
        url: str = None,
    ):
        self.code = code
        self.message = message
        self.out_protocol = out_protocol
        self.request_id = request_id
        self.stream_type = stream_type
        self.url = url

    def validate(self):
        # No required fields or nested models.
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # Serialize only attributes that were actually set.
        return {key: getattr(self, attr)
                for key, attr in self._KEY_MAP.items()
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._KEY_MAP.items():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetDeviceLiveUrlResponse(TeaModel):
    """Complete GetDeviceLiveUrl response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetDeviceLiveUrlResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetDeviceLiveUrlResponseBody().from_map(m['body'])
        return self
class GetDevicePictureRequest(TeaModel):
    """Request parameters for the GetDevicePicture API."""

    # Wire (API) key -> attribute name on this model.
    _KEY_MAP = {'DeviceId': 'device_id'}

    def __init__(
        self,
        device_id: str = None,
    ):
        self.device_id = device_id

    def validate(self):
        # No required fields or nested models.
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # Serialize only attributes that were actually set.
        return {key: getattr(self, attr)
                for key, attr in self._KEY_MAP.items()
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._KEY_MAP.items():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetDevicePictureResponseBody(TeaModel):
    """GetDevicePicture response payload; `data` is a plain string here."""

    # Wire (API) key -> attribute name on this model.
    _KEY_MAP = {
        'Code': 'code',
        'Data': 'data',
        'Message': 'message',
        'RequestId': 'request_id',
    }

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # No required fields or nested models.
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # Serialize only attributes that were actually set.
        return {key: getattr(self, attr)
                for key, attr in self._KEY_MAP.items()
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._KEY_MAP.items():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetDevicePictureResponse(TeaModel):
    """Complete GetDevicePicture response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetDevicePictureResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetDevicePictureResponseBody().from_map(m['body'])
        return self
class GetDeviceStatsRequest(TeaModel):
    """Request parameters for the GetDeviceStats API."""

    # Wire (API) key -> attribute name on this model.
    _KEY_MAP = {
        'CorpIdList': 'corp_id_list',
        'DeviceStatus': 'device_status',
        'DeviceType': 'device_type',
    }

    def __init__(
        self,
        corp_id_list: str = None,
        device_status: str = None,
        device_type: str = None,
    ):
        self.corp_id_list = corp_id_list
        self.device_status = device_status
        self.device_type = device_type

    def validate(self):
        # No required fields or nested models.
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # Serialize only attributes that were actually set.
        return {key: getattr(self, attr)
                for key, attr in self._KEY_MAP.items()
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._KEY_MAP.items():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetDeviceStatsResponseBodyData(TeaModel):
    """One per-status/per-type count entry in the GetDeviceStats response."""

    # Wire (API) key -> attribute name on this model.
    _KEY_MAP = {
        'Count': 'count',
        'DeviceStatus': 'device_status',
        'DeviceType': 'device_type',
    }

    def __init__(
        self,
        count: int = None,
        device_status: str = None,
        device_type: str = None,
    ):
        self.count = count
        self.device_status = device_status
        self.device_type = device_type

    def validate(self):
        # No required fields or nested models.
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # Serialize only attributes that were actually set.
        return {key: getattr(self, attr)
                for key, attr in self._KEY_MAP.items()
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._KEY_MAP.items():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetDeviceStatsResponseBody(TeaModel):
    """GetDeviceStats response payload; `data` is a list of count entries."""

    def __init__(
        self,
        code: str = None,
        data: List[GetDeviceStatsResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Validate each non-empty entry in the data list.
        for entry in self.data or []:
            if entry:
                entry.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        # 'Data' is always emitted, even when the list is unset.
        out['Data'] = []
        if self.data is not None:
            out['Data'] = [entry.to_map() if entry else None for entry in self.data]
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        # The list is always reset before repopulating.
        self.data = []
        if m.get('Data') is not None:
            self.data = [GetDeviceStatsResponseBodyData().from_map(entry)
                         for entry in m.get('Data')]
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetDeviceStatsResponse(TeaModel):
    """Complete GetDeviceStats response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetDeviceStatsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetDeviceStatsResponseBody().from_map(m['body'])
        return self
class GetDeviceVideoUrlRequest(TeaModel):
    """Request parameters for the GetDeviceVideoUrl API."""

    # Wire (API) key -> attribute name on this model.
    _KEY_MAP = {
        'CorpId': 'corp_id',
        'DeviceId': 'device_id',
        'EndTime': 'end_time',
        'GbId': 'gb_id',
        'OutProtocol': 'out_protocol',
        'StartTime': 'start_time',
        'StorageType': 'storage_type',
    }

    def __init__(
        self,
        corp_id: str = None,
        device_id: str = None,
        end_time: int = None,
        gb_id: str = None,
        out_protocol: str = None,
        start_time: int = None,
        storage_type: str = None,
    ):
        self.corp_id = corp_id
        self.device_id = device_id
        self.end_time = end_time
        self.gb_id = gb_id
        self.out_protocol = out_protocol
        self.start_time = start_time
        self.storage_type = storage_type

    def validate(self):
        # No required fields or nested models.
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # Serialize only attributes that were actually set.
        return {key: getattr(self, attr)
                for key, attr in self._KEY_MAP.items()
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._KEY_MAP.items():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetDeviceVideoUrlResponseBody(TeaModel):
    """GetDeviceVideoUrl response payload: result code/message and the playback URL."""

    # Wire (API) key -> attribute name on this model.
    _KEY_MAP = {
        'Code': 'code',
        'Message': 'message',
        'OutProtocol': 'out_protocol',
        'RequestId': 'request_id',
        'Url': 'url',
    }

    def __init__(
        self,
        code: str = None,
        message: str = None,
        out_protocol: str = None,
        request_id: str = None,
        url: str = None,
    ):
        self.code = code
        self.message = message
        self.out_protocol = out_protocol
        self.request_id = request_id
        self.url = url

    def validate(self):
        # No required fields or nested models.
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # Serialize only attributes that were actually set.
        return {key: getattr(self, attr)
                for key, attr in self._KEY_MAP.items()
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._KEY_MAP.items():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetDeviceVideoUrlResponse(TeaModel):
    """Complete GetDeviceVideoUrl response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetDeviceVideoUrlResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetDeviceVideoUrlResponseBody().from_map(m['body'])
        return self
class GetFaceModelResultRequest(TeaModel):
    """Request parameters for the GetFaceModelResult API.

    The picture may be supplied as inline content, by id, or by URL —
    which combinations are accepted is decided server-side.
    """

    # Wire (API) key -> attribute name on this model.
    _KEY_MAP = {
        'PictureContent': 'picture_content',
        'PictureId': 'picture_id',
        'PictureUrl': 'picture_url',
    }

    def __init__(
        self,
        picture_content: str = None,
        picture_id: str = None,
        picture_url: str = None,
    ):
        self.picture_content = picture_content
        self.picture_id = picture_id
        self.picture_url = picture_url

    def validate(self):
        # No required fields or nested models.
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # Serialize only attributes that were actually set.
        return {key: getattr(self, attr)
                for key, attr in self._KEY_MAP.items()
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._KEY_MAP.items():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetFaceModelResultResponseBodyDataRecords(TeaModel):
    """One detected-face record: bounding box, feature vector and attribute
    estimates, each attribute paired with a `...Reliability` confidence field."""

    # Wire (API) key -> attribute name, in the exact order the generated
    # serializer emits them.
    _KEY_MAP = {
        'AgeLowerLimit': 'age_lower_limit',
        'AgeLowerLimitReliability': 'age_lower_limit_reliability',
        'AgeUpLimit': 'age_up_limit',
        'AgeUpLimitReliability': 'age_up_limit_reliability',
        'CapColor': 'cap_color',
        'CapColorReliability': 'cap_color_reliability',
        'CapStyle': 'cap_style',
        'CapStyleReliability': 'cap_style_reliability',
        'EthicCode': 'ethic_code',
        'EthicCodeReliability': 'ethic_code_reliability',
        'FaceStyle': 'face_style',
        'FaceStyleReliability': 'face_style_reliability',
        'FeatureData': 'feature_data',
        'GenderCode': 'gender_code',
        'GenderCodeReliability': 'gender_code_reliability',
        'GlassColor': 'glass_color',
        'GlassColorReliability': 'glass_color_reliability',
        'GlassStyle': 'glass_style',
        'GlassStyleReliability': 'glass_style_reliability',
        'HairColor': 'hair_color',
        'HairColorReliability': 'hair_color_reliability',
        'HairStyle': 'hair_style',
        'HairStyleReliability': 'hair_style_reliability',
        'LeftTopX': 'left_top_x',
        'LeftTopY': 'left_top_y',
        'MustacheStyle': 'mustache_style',
        'MustacheStyleReliability': 'mustache_style_reliability',
        'RespiratorColor': 'respirator_color',
        'RespiratorColorReliability': 'respirator_color_reliability',
        'RightBottomX': 'right_bottom_x',
        'RightBottomY': 'right_bottom_y',
        'SkinColor': 'skin_color',
        'SkinColorReliability': 'skin_color_reliability',
    }

    def __init__(
        self,
        age_lower_limit: int = None,
        age_lower_limit_reliability: str = None,
        age_up_limit: int = None,
        age_up_limit_reliability: str = None,
        cap_color: int = None,
        cap_color_reliability: str = None,
        cap_style: int = None,
        cap_style_reliability: str = None,
        ethic_code: int = None,
        ethic_code_reliability: str = None,
        face_style: str = None,
        face_style_reliability: str = None,
        feature_data: List[float] = None,
        gender_code: int = None,
        gender_code_reliability: str = None,
        glass_color: int = None,
        glass_color_reliability: str = None,
        glass_style: int = None,
        glass_style_reliability: str = None,
        hair_color: int = None,
        hair_color_reliability: str = None,
        hair_style: int = None,
        hair_style_reliability: str = None,
        left_top_x: float = None,
        left_top_y: float = None,
        mustache_style: str = None,
        mustache_style_reliability: str = None,
        respirator_color: int = None,
        respirator_color_reliability: str = None,
        right_bottom_x: float = None,
        right_bottom_y: float = None,
        skin_color: int = None,
        skin_color_reliability: str = None,
    ):
        # Parameter names match attribute names exactly, so assign in bulk.
        supplied = locals()
        for attr in self._KEY_MAP.values():
            setattr(self, attr, supplied[attr])

    def validate(self):
        # No required fields or nested models.
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # Serialize only attributes that were actually set.
        return {key: getattr(self, attr)
                for key, attr in self._KEY_MAP.items()
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._KEY_MAP.items():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetFaceModelResultResponseBodyData(TeaModel):
    """Data section of the GetFaceModelResult response: the list of face records."""

    def __init__(
        self,
        records: List[GetFaceModelResultResponseBodyDataRecords] = None,
    ):
        self.records = records

    def validate(self):
        # Validate each non-empty record in the list.
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        # 'Records' is always emitted, even when the list is unset.
        out['Records'] = []
        if self.records is not None:
            out['Records'] = [record.to_map() if record else None
                              for record in self.records]
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        # The list is always reset before repopulating.
        self.records = []
        if m.get('Records') is not None:
            self.records = [GetFaceModelResultResponseBodyDataRecords().from_map(record)
                            for record in m.get('Records')]
        return self
class GetFaceModelResultResponseBody(TeaModel):
    """GetFaceModelResult response payload: result code/message plus the data section."""

    def __init__(
        self,
        code: str = None,
        data: GetFaceModelResultResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Only the nested data model needs recursive validation.
        if self.data:
            self.data.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = GetFaceModelResultResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetFaceModelResultResponse(TeaModel):
    """Complete GetFaceModelResult response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetFaceModelResultResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetFaceModelResultResponseBody().from_map(m['body'])
        return self
class GetFaceOptionsRequest(TeaModel):
    """Request parameters for the GetFaceOptions API."""

    # Wire (API) key -> attribute name on this model.
    _KEY_MAP = {'CorpId': 'corp_id'}

    def __init__(
        self,
        corp_id: str = None,
    ):
        self.corp_id = corp_id

    def validate(self):
        # No required fields or nested models.
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # Serialize only attributes that were actually set.
        return {key: getattr(self, attr)
                for key, attr in self._KEY_MAP.items()
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._KEY_MAP.items():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetFaceOptionsResponseBodyDataOptionList(TeaModel):
    """A single key/name option entry inside a GetFaceOptions data group."""

    # Wire (API) key -> attribute name on this model.
    _KEY_MAP = {'Key': 'key', 'Name': 'name'}

    def __init__(
        self,
        key: str = None,
        name: str = None,
    ):
        self.key = key
        self.name = name

    def validate(self):
        # No required fields or nested models.
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # Serialize only attributes that were actually set.
        return {wire: getattr(self, attr)
                for wire, attr in self._KEY_MAP.items()
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire, attr in self._KEY_MAP.items():
            if m.get(wire) is not None:
                setattr(self, attr, m.get(wire))
        return self
class GetFaceOptionsResponseBodyData(TeaModel):
    """One option group in the GetFaceOptions response: key/name plus its options."""

    def __init__(
        self,
        key: str = None,
        name: str = None,
        option_list: List[GetFaceOptionsResponseBodyDataOptionList] = None,
    ):
        self.key = key
        self.name = name
        self.option_list = option_list

    def validate(self):
        # Validate each non-empty option entry.
        for option in self.option_list or []:
            if option:
                option.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.key is not None:
            out['Key'] = self.key
        if self.name is not None:
            out['Name'] = self.name
        # 'OptionList' is always emitted, even when the list is unset.
        out['OptionList'] = []
        if self.option_list is not None:
            out['OptionList'] = [option.to_map() if option else None
                                 for option in self.option_list]
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Key') is not None:
            self.key = m.get('Key')
        if m.get('Name') is not None:
            self.name = m.get('Name')
        # The list is always reset before repopulating.
        self.option_list = []
        if m.get('OptionList') is not None:
            self.option_list = [GetFaceOptionsResponseBodyDataOptionList().from_map(option)
                                for option in m.get('OptionList')]
        return self
class GetFaceOptionsResponseBody(TeaModel):
    """GetFaceOptions response payload; `data` is a list of option groups."""

    def __init__(
        self,
        code: str = None,
        data: List[GetFaceOptionsResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Validate each non-empty group in the data list.
        for group in self.data or []:
            if group:
                group.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        # 'Data' is always emitted, even when the list is unset.
        out['Data'] = []
        if self.data is not None:
            out['Data'] = [group.to_map() if group else None for group in self.data]
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        # The list is always reset before repopulating.
        self.data = []
        if m.get('Data') is not None:
            self.data = [GetFaceOptionsResponseBodyData().from_map(group)
                         for group in m.get('Data')]
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetFaceOptionsResponse(TeaModel):
    """Complete GetFaceOptions response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetFaceOptionsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetFaceOptionsResponseBody().from_map(m['body'])
        return self
class GetInventoryRequest(TeaModel):
    """Request for GetInventory: identifies the commodity to look up."""

    def __init__(
        self,
        commodity_code: str = None,
    ):
        self.commodity_code = commodity_code

    def validate(self):
        # Nothing to validate: the single field is an optional scalar.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.commodity_code is not None:
            payload['CommodityCode'] = self.commodity_code
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('CommodityCode') is not None:
            self.commodity_code = m.get('CommodityCode')
        return self
class GetInventoryResponseBodyDataResultObject(TeaModel):
    """One inventory record in the GetInventory response."""

    # (python attribute, wire key) pairs; order matches the wire format.
    _FIELDS = (
        ('buyer_id', 'BuyerId'),
        ('commodity_code', 'CommodityCode'),
        ('current_inventory', 'CurrentInventory'),
        ('instance_id', 'InstanceId'),
        ('inventory_id', 'InventoryId'),
        ('valid_end_time', 'ValidEndTime'),
        ('valid_start_time', 'ValidStartTime'),
    )

    def __init__(
        self,
        buyer_id: str = None,
        commodity_code: str = None,
        current_inventory: str = None,
        instance_id: str = None,
        inventory_id: str = None,
        valid_end_time: str = None,
        valid_start_time: str = None,
    ):
        self.buyer_id = buyer_id
        self.commodity_code = commodity_code
        self.current_inventory = current_inventory
        self.instance_id = instance_id
        self.inventory_id = inventory_id
        self.valid_end_time = valid_end_time
        self.valid_start_time = valid_start_time

    def validate(self):
        # Nothing to validate: every field is an optional scalar.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetInventoryResponseBodyData(TeaModel):
    """Data section of the GetInventory response: a list of inventory records."""

    def __init__(
        self,
        result_object: List[GetInventoryResponseBodyDataResultObject] = None,
    ):
        self.result_object = result_object

    def validate(self):
        # Delegate validation to each nested record.
        for entry in (self.result_object or []):
            if entry:
                entry.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        # 'ResultObject' is always present in the output, even when empty.
        payload['ResultObject'] = []
        if self.result_object is not None:
            payload['ResultObject'] = [
                entry.to_map() if entry else None for entry in self.result_object
            ]
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        self.result_object = []
        raw_items = m.get('ResultObject')
        if raw_items is not None:
            self.result_object = [
                GetInventoryResponseBodyDataResultObject().from_map(entry)
                for entry in raw_items
            ]
        return self
class GetInventoryResponseBody(TeaModel):
    """Body of the GetInventory response: success flag plus nested data."""

    def __init__(
        self,
        data: GetInventoryResponseBodyData = None,
        success: bool = None,
    ):
        self.data = data
        self.success = success

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.data is not None:
            payload['Data'] = self.data.to_map()
        if self.success is not None:
            payload['Success'] = self.success
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('Data') is not None:
            self.data = GetInventoryResponseBodyData().from_map(m['Data'])
        if m.get('Success') is not None:
            self.success = m.get('Success')
        return self
class GetInventoryResponse(TeaModel):
    """API-level GetInventory response: HTTP headers, status and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetInventoryResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.status_code is not None:
            payload['statusCode'] = self.status_code
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetInventoryResponseBody().from_map(m['body'])
        return self
class GetMonitorListRequest(TeaModel):
    """Paged request for GetMonitorList, scoped to one corporation."""

    def __init__(
        self,
        corp_id: str = None,
        page_number: int = None,
        page_size: int = None,
    ):
        self.corp_id = corp_id
        self.page_number = page_number
        self.page_size = page_size

    def validate(self):
        # Nothing to validate: every field is an optional scalar.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.corp_id is not None:
            payload['CorpId'] = self.corp_id
        if self.page_number is not None:
            payload['PageNumber'] = self.page_number
        if self.page_size is not None:
            payload['PageSize'] = self.page_size
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('CorpId') is not None:
            self.corp_id = m.get('CorpId')
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        return self
class GetMonitorListResponseBodyDataRecords(TeaModel):
    """One monitoring-rule record returned by GetMonitorList."""

    # (python attribute, wire key) pairs; order matches the wire format.
    _FIELDS = (
        ('algorithm_vendor', 'AlgorithmVendor'),
        ('attributes', 'Attributes'),
        ('create_date', 'CreateDate'),
        ('description', 'Description'),
        ('device_list', 'DeviceList'),
        ('expression', 'Expression'),
        ('image_match', 'ImageMatch'),
        ('modified_date', 'ModifiedDate'),
        ('monitor_type', 'MonitorType'),
        ('notifier_extend_values', 'NotifierExtendValues'),
        ('notifier_type', 'NotifierType'),
        ('rule_expression', 'RuleExpression'),
        ('rule_name', 'RuleName'),
        ('status', 'Status'),
        ('task_id', 'TaskId'),
    )

    def __init__(
        self,
        algorithm_vendor: str = None,
        attributes: str = None,
        create_date: str = None,
        description: str = None,
        device_list: str = None,
        expression: str = None,
        image_match: str = None,
        modified_date: str = None,
        monitor_type: str = None,
        notifier_extend_values: str = None,
        notifier_type: str = None,
        rule_expression: str = None,
        rule_name: str = None,
        status: str = None,
        task_id: str = None,
    ):
        self.algorithm_vendor = algorithm_vendor
        self.attributes = attributes
        self.create_date = create_date
        self.description = description
        self.device_list = device_list
        self.expression = expression
        self.image_match = image_match
        self.modified_date = modified_date
        self.monitor_type = monitor_type
        self.notifier_extend_values = notifier_extend_values
        self.notifier_type = notifier_type
        self.rule_expression = rule_expression
        self.rule_name = rule_name
        self.status = status
        self.task_id = task_id

    def validate(self):
        # Nothing to validate: every field is an optional scalar.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetMonitorListResponseBodyData(TeaModel):
    """Paged data section of the GetMonitorList response."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[GetMonitorListResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        # Delegate validation to each nested record.
        for entry in (self.records or []):
            if entry:
                entry.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.page_number is not None:
            payload['PageNumber'] = self.page_number
        if self.page_size is not None:
            payload['PageSize'] = self.page_size
        # 'Records' is always present in the output, even when empty.
        payload['Records'] = []
        if self.records is not None:
            payload['Records'] = [entry.to_map() if entry else None for entry in self.records]
        if self.total_count is not None:
            payload['TotalCount'] = self.total_count
        if self.total_page is not None:
            payload['TotalPage'] = self.total_page
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        self.records = []
        raw_records = m.get('Records')
        if raw_records is not None:
            self.records = [
                GetMonitorListResponseBodyDataRecords().from_map(entry)
                for entry in raw_records
            ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        return self
class GetMonitorListResponseBody(TeaModel):
    """Body of the GetMonitorList response: result code, data and ids."""

    def __init__(
        self,
        code: str = None,
        data: GetMonitorListResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.code is not None:
            payload['Code'] = self.code
        if self.data is not None:
            payload['Data'] = self.data.to_map()
        if self.message is not None:
            payload['Message'] = self.message
        if self.request_id is not None:
            payload['RequestId'] = self.request_id
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = GetMonitorListResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetMonitorListResponse(TeaModel):
    """API-level GetMonitorList response: HTTP headers, status and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetMonitorListResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.status_code is not None:
            payload['statusCode'] = self.status_code
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetMonitorListResponseBody().from_map(m['body'])
        return self
class GetMonitorResultRequest(TeaModel):
    """Request for GetMonitorResult: task, corp and time-window filters."""

    # (python attribute, wire key) pairs; order matches the wire format.
    _FIELDS = (
        ('algorithm_vendor', 'AlgorithmVendor'),
        ('corp_id', 'CorpId'),
        ('end_time', 'EndTime'),
        ('min_record_id', 'MinRecordId'),
        ('start_time', 'StartTime'),
        ('task_id', 'TaskId'),
    )

    def __init__(
        self,
        algorithm_vendor: str = None,
        corp_id: str = None,
        end_time: int = None,
        min_record_id: str = None,
        start_time: int = None,
        task_id: str = None,
    ):
        self.algorithm_vendor = algorithm_vendor
        self.corp_id = corp_id
        self.end_time = end_time
        self.min_record_id = min_record_id
        self.start_time = start_time
        self.task_id = task_id

    def validate(self):
        # Nothing to validate: every field is an optional scalar.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetMonitorResultResponseBodyDataRecordsExtendInfo(TeaModel):
    """Extended info attached to a monitor-result record (plate number)."""

    def __init__(
        self,
        plate_no: str = None,
    ):
        self.plate_no = plate_no

    def validate(self):
        # Nothing to validate: the single field is an optional scalar.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.plate_no is not None:
            payload['PlateNo'] = self.plate_no
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('PlateNo') is not None:
            self.plate_no = m.get('PlateNo')
        return self
class GetMonitorResultResponseBodyDataRecords(TeaModel):
    """One hit record from GetMonitorResult: match geometry, score and pics."""

    # Flat (python attribute, wire key) pairs, in wire order; the nested
    # 'ExtendInfo' model is handled separately and serialized first.
    _FLAT_FIELDS = (
        ('gb_id', 'GbId'),
        ('left_up_x', 'LeftUpX'),
        ('left_up_y', 'LeftUpY'),
        ('monitor_pic_url', 'MonitorPicUrl'),
        ('pic_url', 'PicUrl'),
        ('right_bottom_x', 'RightBottomX'),
        ('right_bottom_y', 'RightBottomY'),
        ('score', 'Score'),
        ('shot_time', 'ShotTime'),
        ('target_pic_url', 'TargetPicUrl'),
        ('task_id', 'TaskId'),
    )

    def __init__(
        self,
        extend_info: GetMonitorResultResponseBodyDataRecordsExtendInfo = None,
        gb_id: str = None,
        left_up_x: str = None,
        left_up_y: str = None,
        monitor_pic_url: str = None,
        pic_url: str = None,
        right_bottom_x: str = None,
        right_bottom_y: str = None,
        score: str = None,
        shot_time: str = None,
        target_pic_url: str = None,
        task_id: str = None,
    ):
        self.extend_info = extend_info
        self.gb_id = gb_id
        self.left_up_x = left_up_x
        self.left_up_y = left_up_y
        self.monitor_pic_url = monitor_pic_url
        self.pic_url = pic_url
        self.right_bottom_x = right_bottom_x
        self.right_bottom_y = right_bottom_y
        self.score = score
        self.shot_time = shot_time
        self.target_pic_url = target_pic_url
        self.task_id = task_id

    def validate(self):
        if self.extend_info:
            self.extend_info.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.extend_info is not None:
            payload['ExtendInfo'] = self.extend_info.to_map()
        for attr, key in self._FLAT_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('ExtendInfo') is not None:
            self.extend_info = (
                GetMonitorResultResponseBodyDataRecordsExtendInfo().from_map(m['ExtendInfo'])
            )
        for attr, key in self._FLAT_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetMonitorResultResponseBodyData(TeaModel):
    """Data section of GetMonitorResult: cursor (MaxId) plus hit records."""

    def __init__(
        self,
        max_id: str = None,
        records: List[GetMonitorResultResponseBodyDataRecords] = None,
    ):
        self.max_id = max_id
        self.records = records

    def validate(self):
        # Delegate validation to each nested record.
        for entry in (self.records or []):
            if entry:
                entry.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.max_id is not None:
            payload['MaxId'] = self.max_id
        # 'Records' is always present in the output, even when empty.
        payload['Records'] = []
        if self.records is not None:
            payload['Records'] = [entry.to_map() if entry else None for entry in self.records]
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('MaxId') is not None:
            self.max_id = m.get('MaxId')
        self.records = []
        raw_records = m.get('Records')
        if raw_records is not None:
            self.records = [
                GetMonitorResultResponseBodyDataRecords().from_map(entry)
                for entry in raw_records
            ]
        return self
class GetMonitorResultResponseBody(TeaModel):
    """Body of the GetMonitorResult response: result code, data and ids."""

    def __init__(
        self,
        code: str = None,
        data: GetMonitorResultResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.code is not None:
            payload['Code'] = self.code
        if self.data is not None:
            payload['Data'] = self.data.to_map()
        if self.message is not None:
            payload['Message'] = self.message
        if self.request_id is not None:
            payload['RequestId'] = self.request_id
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = GetMonitorResultResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetMonitorResultResponse(TeaModel):
    """API-level GetMonitorResult response: HTTP headers, status and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetMonitorResultResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.status_code is not None:
            payload['statusCode'] = self.status_code
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetMonitorResultResponseBody().from_map(m['body'])
        return self
class GetOdpsResultResponseBody(TeaModel):
    """Body of the GetOdpsResult response: raw result payload plus success flag."""

    def __init__(
        self,
        data: str = None,
        success: bool = None,
    ):
        # Raw ODPS result payload (serialized as a plain string on the wire).
        self.data = data
        self.success = success

    def validate(self):
        # Nothing to validate: both fields are optional scalars.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.data is not None:
            payload['Data'] = self.data
        if self.success is not None:
            payload['Success'] = self.success
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('Data') is not None:
            self.data = m.get('Data')
        if m.get('Success') is not None:
            self.success = m.get('Success')
        return self
class GetOdpsResultResponse(TeaModel):
    """API-level GetOdpsResult response: HTTP headers, status and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetOdpsResultResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.status_code is not None:
            payload['statusCode'] = self.status_code
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetOdpsResultResponseBody().from_map(m['body'])
        return self
class GetPersonDetailRequest(TeaModel):
    """Request for GetPersonDetail: algorithm type, corp and person ids."""

    def __init__(
        self,
        algorithm_type: str = None,
        corp_id: str = None,
        person_id: str = None,
    ):
        self.algorithm_type = algorithm_type
        self.corp_id = corp_id
        self.person_id = person_id

    def validate(self):
        # Nothing to validate: every field is an optional scalar.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.algorithm_type is not None:
            payload['AlgorithmType'] = self.algorithm_type
        if self.corp_id is not None:
            payload['CorpId'] = self.corp_id
        if self.person_id is not None:
            # NOTE: the wire key here is 'PersonID' (capital D), unlike the
            # 'PersonId' spelling used by the response models.
            payload['PersonID'] = self.person_id
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('AlgorithmType') is not None:
            self.algorithm_type = m.get('AlgorithmType')
        if m.get('CorpId') is not None:
            self.corp_id = m.get('CorpId')
        if m.get('PersonID') is not None:
            self.person_id = m.get('PersonID')
        return self
class GetPersonDetailResponseBodyDataTagList(TeaModel):
    """A single tag attached to a person in the GetPersonDetail response."""

    # (python attribute, wire key) pairs; order matches the wire format.
    _FIELDS = (
        ('tag_code', 'TagCode'),
        ('tag_name', 'TagName'),
        ('tag_value', 'TagValue'),
        ('tag_value_id', 'TagValueId'),
    )

    def __init__(
        self,
        tag_code: str = None,
        tag_name: str = None,
        tag_value: str = None,
        tag_value_id: str = None,
    ):
        self.tag_code = tag_code
        self.tag_name = tag_name
        self.tag_value = tag_value
        self.tag_value_id = tag_value_id

    def validate(self):
        # Nothing to validate: every field is an optional scalar.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetPersonDetailResponseBodyData(TeaModel):
    """Data section of GetPersonDetail: person id, picture URL and tags."""

    def __init__(
        self,
        person_id: str = None,
        pic_url: str = None,
        tag_list: List[GetPersonDetailResponseBodyDataTagList] = None,
    ):
        self.person_id = person_id
        self.pic_url = pic_url
        self.tag_list = tag_list

    def validate(self):
        # Delegate validation to each nested tag.
        for entry in (self.tag_list or []):
            if entry:
                entry.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.person_id is not None:
            payload['PersonId'] = self.person_id
        if self.pic_url is not None:
            payload['PicUrl'] = self.pic_url
        # 'TagList' is always present in the output, even when empty.
        payload['TagList'] = []
        if self.tag_list is not None:
            payload['TagList'] = [entry.to_map() if entry else None for entry in self.tag_list]
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('PersonId') is not None:
            self.person_id = m.get('PersonId')
        if m.get('PicUrl') is not None:
            self.pic_url = m.get('PicUrl')
        self.tag_list = []
        raw_tags = m.get('TagList')
        if raw_tags is not None:
            self.tag_list = [
                GetPersonDetailResponseBodyDataTagList().from_map(entry)
                for entry in raw_tags
            ]
        return self
class GetPersonDetailResponseBody(TeaModel):
    """Body of the GetPersonDetail response: result code, data and ids."""

    def __init__(
        self,
        code: str = None,
        data: GetPersonDetailResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.code is not None:
            payload['Code'] = self.code
        if self.data is not None:
            payload['Data'] = self.data.to_map()
        if self.message is not None:
            payload['Message'] = self.message
        if self.request_id is not None:
            payload['RequestId'] = self.request_id
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = GetPersonDetailResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetPersonDetailResponse(TeaModel):
    """API-level GetPersonDetail response: HTTP headers, status and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetPersonDetailResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory on a well-formed response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        if self.headers is not None:
            payload['headers'] = self.headers
        if self.status_code is not None:
            payload['statusCode'] = self.status_code
        if self.body is not None:
            payload['body'] = self.body.to_map()
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetPersonDetailResponseBody().from_map(m['body'])
        return self
class GetPersonListRequest(TeaModel):
    """Paged request for GetPersonList with optional face-matching filters."""

    # (python attribute, wire key) pairs; order matches the wire format.
    _FIELDS = (
        ('corp_id', 'CorpId'),
        ('corp_id_list', 'CorpIdList'),
        ('face_matching_rate_threshold', 'FaceMatchingRateThreshold'),
        ('face_url', 'FaceUrl'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('person_id_list', 'PersonIdList'),
    )

    def __init__(
        self,
        corp_id: str = None,
        corp_id_list: Dict[str, Any] = None,
        face_matching_rate_threshold: str = None,
        face_url: str = None,
        page_number: int = None,
        page_size: int = None,
        person_id_list: Dict[str, Any] = None,
    ):
        self.corp_id = corp_id
        self.corp_id_list = corp_id_list
        self.face_matching_rate_threshold = face_matching_rate_threshold
        self.face_url = face_url
        self.page_number = page_number
        self.page_size = page_size
        self.person_id_list = person_id_list

    def validate(self):
        # Nothing to validate: all fields are optional scalars/containers.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetPersonListShrinkRequest(TeaModel):
    """Shrink variant of GetPersonListRequest: container params pre-serialized
    to strings, but carried under the same wire keys as the plain request."""

    # (python attribute, wire key) pairs; the *_shrink attributes still map
    # to the original (non-shrink) wire keys.
    _FIELDS = (
        ('corp_id', 'CorpId'),
        ('corp_id_list_shrink', 'CorpIdList'),
        ('face_matching_rate_threshold', 'FaceMatchingRateThreshold'),
        ('face_url', 'FaceUrl'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('person_id_list_shrink', 'PersonIdList'),
    )

    def __init__(
        self,
        corp_id: str = None,
        corp_id_list_shrink: str = None,
        face_matching_rate_threshold: str = None,
        face_url: str = None,
        page_number: int = None,
        page_size: int = None,
        person_id_list_shrink: str = None,
    ):
        self.corp_id = corp_id
        self.corp_id_list_shrink = corp_id_list_shrink
        self.face_matching_rate_threshold = face_matching_rate_threshold
        self.face_url = face_url
        self.page_number = page_number
        self.page_size = page_size
        self.person_id_list_shrink = person_id_list_shrink

    def validate(self):
        # Nothing to validate: every field is an optional scalar.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetPersonListResponseBodyDataRecordsPropertyTagList(TeaModel):
    """A property tag attached to a person record in GetPersonList."""

    # (python attribute, wire key) pairs; order matches the wire format.
    _FIELDS = (
        ('code', 'Code'),
        ('tag_code_name', 'TagCodeName'),
        ('tag_name', 'TagName'),
        ('value', 'Value'),
    )

    def __init__(
        self,
        code: str = None,
        tag_code_name: str = None,
        tag_name: str = None,
        value: str = None,
    ):
        self.code = code
        self.tag_code_name = tag_code_name
        self.tag_name = tag_name
        self.value = value

    def validate(self):
        # Nothing to validate: every field is an optional scalar.
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        payload = {}
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                payload[key] = value
        return payload

    def from_map(self, m: dict = None):
        """Populate this model from the wire-format dict *m*; return self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetPersonListResponseBodyDataRecords(TeaModel):
    """One person record in the GetPersonList response, with its tag list."""

    # Scalar (attribute, wire-key) pairs, in emission order.
    _SCALAR_PAIRS = (
        ('face_url', 'FaceUrl'),
        ('first_shot_time', 'FirstShotTime'),
        ('last_shot_time', 'LastShotTime'),
        ('person_id', 'PersonId'),
    )

    def __init__(
        self,
        face_url: str = None,
        first_shot_time: int = None,
        last_shot_time: int = None,
        person_id: str = None,
        property_tag_list: List[GetPersonListResponseBodyDataRecordsPropertyTagList] = None,
        search_matching_rate: str = None,
    ):
        self.face_url = face_url
        self.first_shot_time = first_shot_time
        self.last_shot_time = last_shot_time
        self.person_id = person_id
        self.property_tag_list = property_tag_list
        self.search_matching_rate = search_matching_rate

    def validate(self):
        # Delegate to every nested tag model, skipping falsy entries.
        for tag in self.property_tag_list or []:
            if tag:
                tag.validate()

    def to_map(self):
        """Serialize to a wire-keyed dict; 'PropertyTagList' is always present."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        for attr, key in self._SCALAR_PAIRS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        out['PropertyTagList'] = []
        if self.property_tag_list is not None:
            for tag in self.property_tag_list:
                out['PropertyTagList'].append(tag.to_map() if tag else None)
        if self.search_matching_rate is not None:
            out['SearchMatchingRate'] = self.search_matching_rate
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        for attr, key in self._SCALAR_PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        # The tag list is always reset before (re)loading.
        self.property_tag_list = []
        if m.get('PropertyTagList') is not None:
            for raw in m.get('PropertyTagList'):
                self.property_tag_list.append(
                    GetPersonListResponseBodyDataRecordsPropertyTagList().from_map(raw)
                )
        if m.get('SearchMatchingRate') is not None:
            self.search_matching_rate = m.get('SearchMatchingRate')
        return self
class GetPersonListResponseBodyData(TeaModel):
    """Paged container of person records in the GetPersonList response."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[GetPersonListResponseBodyDataRecords] = None,
        total_count: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count

    def validate(self):
        # Delegate to each record model, skipping falsy entries.
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize to a wire-keyed dict; 'Records' is always present."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        if self.page_number is not None:
            out['PageNumber'] = self.page_number
        if self.page_size is not None:
            out['PageSize'] = self.page_size
        out['Records'] = []
        if self.records is not None:
            for record in self.records:
                out['Records'].append(record.to_map() if record else None)
        if self.total_count is not None:
            out['TotalCount'] = self.total_count
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        # The record list is always reset before (re)loading.
        self.records = []
        if m.get('Records') is not None:
            for raw in m.get('Records'):
                self.records.append(GetPersonListResponseBodyDataRecords().from_map(raw))
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class GetPersonListResponseBody(TeaModel):
    """Top-level GetPersonList response body: status code, message and data."""

    def __init__(
        self,
        code: str = None,
        data: GetPersonListResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Only the nested data model carries validation logic.
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = GetPersonListResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetPersonListResponse(TeaModel):
    """HTTP-level wrapper for GetPersonList: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetPersonListResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Headers, status code and body are all mandatory on a response.
        for part, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(part, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize non-None parts to a dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Load parts from a dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetPersonListResponseBody().from_map(m['body'])
        return self
class GetPictureUrlRequest(TeaModel):
    """Request model for GetPictureUrl (expiry, source URL, protocol)."""

    # (attribute, wire-key) pairs, in emission order.
    _FIELD_PAIRS = (
        ('expire_time', 'ExpireTime'),
        ('origin_url', 'OriginUrl'),
        ('protocol', 'Protocol'),
    )

    def __init__(
        self,
        expire_time: str = None,
        origin_url: str = None,
        protocol: str = None,
    ):
        self.expire_time = expire_time
        self.origin_url = origin_url
        self.protocol = protocol

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        for attr, key in self._FIELD_PAIRS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        for attr, key in self._FIELD_PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetPictureUrlResponseBody(TeaModel):
    """Response body for GetPictureUrl: status fields plus the signed URL."""

    # (attribute, wire-key) pairs, in emission order.
    _FIELD_PAIRS = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
        ('url', 'Url'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        url: str = None,
    ):
        self.code = code
        self.message = message
        self.request_id = request_id
        self.url = url

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        for attr, key in self._FIELD_PAIRS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        for attr, key in self._FIELD_PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetPictureUrlResponse(TeaModel):
    """HTTP-level wrapper for GetPictureUrl: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetPictureUrlResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Headers, status code and body are all mandatory on a response.
        for part, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(part, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize non-None parts to a dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Load parts from a dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetPictureUrlResponseBody().from_map(m['body'])
        return self
class GetProfileDetailRequest(TeaModel):
    """Request model for GetProfileDetail (corp, sub-ISV and profile ids)."""

    # (attribute, wire-key) pairs, in emission order.
    _FIELD_PAIRS = (
        ('corp_id', 'CorpId'),
        ('isv_sub_id', 'IsvSubId'),
        ('profile_id', 'ProfileId'),
    )

    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
        profile_id: int = None,
    ):
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id
        self.profile_id = profile_id

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        for attr, key in self._FIELD_PAIRS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        for attr, key in self._FIELD_PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetProfileDetailResponseBodyData(TeaModel):
    """Profile detail payload: identity, contact and catalog attributes."""

    # (attribute, wire-key) pairs, in emission order.
    _FIELD_PAIRS = (
        ('biz_id', 'BizId'),
        ('catalog_id', 'CatalogId'),
        ('face_url', 'FaceUrl'),
        ('gender', 'Gender'),
        ('id_number', 'IdNumber'),
        ('isv_sub_id', 'IsvSubId'),
        ('live_address', 'LiveAddress'),
        ('name', 'Name'),
        ('person_id', 'PersonId'),
        ('phone_no', 'PhoneNo'),
        ('plate_no', 'PlateNo'),
        ('profile_id', 'ProfileId'),
        ('scene_type', 'SceneType'),
    )

    def __init__(
        self,
        biz_id: str = None,
        catalog_id: int = None,
        face_url: str = None,
        gender: str = None,
        id_number: str = None,
        isv_sub_id: str = None,
        live_address: str = None,
        name: str = None,
        person_id: str = None,
        phone_no: str = None,
        plate_no: str = None,
        profile_id: int = None,
        scene_type: str = None,
    ):
        self.biz_id = biz_id
        self.catalog_id = catalog_id
        self.face_url = face_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.live_address = live_address
        self.name = name
        self.person_id = person_id
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.profile_id = profile_id
        self.scene_type = scene_type

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        for attr, key in self._FIELD_PAIRS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        for attr, key in self._FIELD_PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetProfileDetailResponseBody(TeaModel):
    """Top-level GetProfileDetail response body: status fields plus data."""

    def __init__(
        self,
        code: str = None,
        data: GetProfileDetailResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Only the nested data model carries validation logic.
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = GetProfileDetailResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetProfileDetailResponse(TeaModel):
    """HTTP-level wrapper for GetProfileDetail: headers, status code, body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetProfileDetailResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Headers, status code and body are all mandatory on a response.
        for part, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(part, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize non-None parts to a dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Load parts from a dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetProfileDetailResponseBody().from_map(m['body'])
        return self
class GetProfileListRequest(TeaModel):
    """Request model for GetProfileList: filter criteria plus paging."""

    # (attribute, wire-key) pairs, in emission order.
    _FIELD_PAIRS = (
        ('biz_id', 'BizId'),
        ('catalog_id', 'CatalogId'),
        ('corp_id', 'CorpId'),
        ('face_image_id', 'FaceImageId'),
        ('face_url', 'FaceUrl'),
        ('gender', 'Gender'),
        ('id_number', 'IdNumber'),
        ('isv_sub_id', 'IsvSubId'),
        ('live_address', 'LiveAddress'),
        ('matching_rate_threshold', 'MatchingRateThreshold'),
        ('name', 'Name'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('person_id_list', 'PersonIdList'),
        ('phone_no', 'PhoneNo'),
        ('plate_no', 'PlateNo'),
        ('profile_id_list', 'ProfileIdList'),
        ('scene_type', 'SceneType'),
    )

    def __init__(
        self,
        biz_id: str = None,
        catalog_id: int = None,
        corp_id: str = None,
        face_image_id: str = None,
        face_url: str = None,
        gender: int = None,
        id_number: str = None,
        isv_sub_id: str = None,
        live_address: str = None,
        matching_rate_threshold: str = None,
        name: str = None,
        page_number: int = None,
        page_size: int = None,
        person_id_list: Dict[str, Any] = None,
        phone_no: str = None,
        plate_no: str = None,
        profile_id_list: Dict[str, Any] = None,
        scene_type: str = None,
    ):
        self.biz_id = biz_id
        self.catalog_id = catalog_id
        self.corp_id = corp_id
        self.face_image_id = face_image_id
        self.face_url = face_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.live_address = live_address
        self.matching_rate_threshold = matching_rate_threshold
        self.name = name
        self.page_number = page_number
        self.page_size = page_size
        self.person_id_list = person_id_list
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.profile_id_list = profile_id_list
        self.scene_type = scene_type

    def validate(self):
        # All fields are passed through verbatim; nothing to check.
        pass

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        for attr, key in self._FIELD_PAIRS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        for attr, key in self._FIELD_PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetProfileListShrinkRequest(TeaModel):
    """Shrink variant of GetProfileListRequest: list filters pre-serialized
    to strings but carried under the same wire keys."""

    # (attribute, wire-key) pairs, in emission order.  Note the *_shrink
    # attributes still map to the original 'PersonIdList'/'ProfileIdList' keys.
    _FIELD_PAIRS = (
        ('biz_id', 'BizId'),
        ('catalog_id', 'CatalogId'),
        ('corp_id', 'CorpId'),
        ('face_image_id', 'FaceImageId'),
        ('face_url', 'FaceUrl'),
        ('gender', 'Gender'),
        ('id_number', 'IdNumber'),
        ('isv_sub_id', 'IsvSubId'),
        ('live_address', 'LiveAddress'),
        ('matching_rate_threshold', 'MatchingRateThreshold'),
        ('name', 'Name'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('person_id_list_shrink', 'PersonIdList'),
        ('phone_no', 'PhoneNo'),
        ('plate_no', 'PlateNo'),
        ('profile_id_list_shrink', 'ProfileIdList'),
        ('scene_type', 'SceneType'),
    )

    def __init__(
        self,
        biz_id: str = None,
        catalog_id: int = None,
        corp_id: str = None,
        face_image_id: str = None,
        face_url: str = None,
        gender: int = None,
        id_number: str = None,
        isv_sub_id: str = None,
        live_address: str = None,
        matching_rate_threshold: str = None,
        name: str = None,
        page_number: int = None,
        page_size: int = None,
        person_id_list_shrink: str = None,
        phone_no: str = None,
        plate_no: str = None,
        profile_id_list_shrink: str = None,
        scene_type: str = None,
    ):
        self.biz_id = biz_id
        self.catalog_id = catalog_id
        self.corp_id = corp_id
        self.face_image_id = face_image_id
        self.face_url = face_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.live_address = live_address
        self.matching_rate_threshold = matching_rate_threshold
        self.name = name
        self.page_number = page_number
        self.page_size = page_size
        self.person_id_list_shrink = person_id_list_shrink
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.profile_id_list_shrink = profile_id_list_shrink
        self.scene_type = scene_type

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        for attr, key in self._FIELD_PAIRS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        for attr, key in self._FIELD_PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetProfileListResponseBodyDataRecords(TeaModel):
    """One profile record in the GetProfileList response."""

    # (attribute, wire-key) pairs, in emission order.
    _FIELD_PAIRS = (
        ('biz_id', 'BizId'),
        ('catalog_id', 'CatalogId'),
        ('face_url', 'FaceUrl'),
        ('gender', 'Gender'),
        ('id_number', 'IdNumber'),
        ('isv_sub_id', 'IsvSubId'),
        ('name', 'Name'),
        ('person_id', 'PersonId'),
        ('profile_id', 'ProfileId'),
        ('scene_type', 'SceneType'),
        ('search_matching_rate', 'SearchMatchingRate'),
    )

    def __init__(
        self,
        biz_id: str = None,
        catalog_id: int = None,
        face_url: str = None,
        gender: str = None,
        id_number: str = None,
        isv_sub_id: str = None,
        name: str = None,
        person_id: str = None,
        profile_id: int = None,
        scene_type: str = None,
        search_matching_rate: str = None,
    ):
        self.biz_id = biz_id
        self.catalog_id = catalog_id
        self.face_url = face_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.name = name
        self.person_id = person_id
        self.profile_id = profile_id
        self.scene_type = scene_type
        self.search_matching_rate = search_matching_rate

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        for attr, key in self._FIELD_PAIRS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        for attr, key in self._FIELD_PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetProfileListResponseBodyData(TeaModel):
    """Paged container of profile records in the GetProfileList response."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[GetProfileListResponseBodyDataRecords] = None,
        success: bool = None,
        total: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.success = success
        self.total = total

    def validate(self):
        # Delegate to each record model, skipping falsy entries.
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize to a wire-keyed dict; 'Records' is always present."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        if self.page_number is not None:
            out['PageNumber'] = self.page_number
        if self.page_size is not None:
            out['PageSize'] = self.page_size
        out['Records'] = []
        if self.records is not None:
            for record in self.records:
                out['Records'].append(record.to_map() if record else None)
        if self.success is not None:
            out['Success'] = self.success
        if self.total is not None:
            out['Total'] = self.total
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        # The record list is always reset before (re)loading.
        self.records = []
        if m.get('Records') is not None:
            for raw in m.get('Records'):
                self.records.append(GetProfileListResponseBodyDataRecords().from_map(raw))
        if m.get('Success') is not None:
            self.success = m.get('Success')
        if m.get('Total') is not None:
            self.total = m.get('Total')
        return self
class GetProfileListResponseBody(TeaModel):
    """Top-level GetProfileList response body: status fields plus data."""

    def __init__(
        self,
        code: str = None,
        data: GetProfileListResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Only the nested data model carries validation logic.
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = GetProfileListResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetProfileListResponse(TeaModel):
    """HTTP-level wrapper for GetProfileList: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetProfileListResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Headers, status code and body are all mandatory on a response.
        for part, label in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(part, label)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize non-None parts to a dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Load parts from a dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetProfileListResponseBody().from_map(m['body'])
        return self
class GetScanSubDevicesRequest(TeaModel):
    """Request model for GetScanSubDevices (corp id and parent device id)."""

    # (attribute, wire-key) pairs, in emission order.
    _FIELD_PAIRS = (
        ('corp_id', 'CorpId'),
        ('device_id', 'DeviceId'),
    )

    def __init__(
        self,
        corp_id: str = None,
        device_id: str = None,
    ):
        self.corp_id = corp_id
        self.device_id = device_id

    def validate(self):
        # Both fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        for attr, key in self._FIELD_PAIRS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        for attr, key in self._FIELD_PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetScanSubDevicesResponseBodyDataSubDeviceList(TeaModel):
    """One discovered sub-device: hardware identity and network endpoint."""

    # (attribute, wire-key) pairs, in emission order.
    _FIELD_PAIRS = (
        ('associated_nvr', 'AssociatedNvr'),
        ('device_mac', 'DeviceMac'),
        ('device_model', 'DeviceModel'),
        ('device_sn', 'DeviceSn'),
        ('firmware_version', 'FirmwareVersion'),
        ('manufacturer', 'Manufacturer'),
        ('sub_device_ip', 'SubDeviceIp'),
        ('sub_device_port', 'SubDevicePort'),
    )

    def __init__(
        self,
        associated_nvr: str = None,
        device_mac: str = None,
        device_model: str = None,
        device_sn: str = None,
        firmware_version: str = None,
        manufacturer: str = None,
        sub_device_ip: str = None,
        sub_device_port: str = None,
    ):
        self.associated_nvr = associated_nvr
        self.device_mac = device_mac
        self.device_model = device_model
        self.device_sn = device_sn
        self.firmware_version = firmware_version
        self.manufacturer = manufacturer
        self.sub_device_ip = sub_device_ip
        self.sub_device_port = sub_device_port

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        for attr, key in self._FIELD_PAIRS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        for attr, key in self._FIELD_PAIRS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetScanSubDevicesResponseBodyData(TeaModel):
    """Container for the list of discovered sub-devices."""

    def __init__(
        self,
        sub_device_list: List[GetScanSubDevicesResponseBodyDataSubDeviceList] = None,
    ):
        self.sub_device_list = sub_device_list

    def validate(self):
        # Delegate to each sub-device model, skipping falsy entries.
        for device in self.sub_device_list or []:
            if device:
                device.validate()

    def to_map(self):
        """Serialize to a wire-keyed dict; 'SubDeviceList' is always present."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        out['SubDeviceList'] = []
        if self.sub_device_list is not None:
            for device in self.sub_device_list:
                out['SubDeviceList'].append(device.to_map() if device else None)
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        # The device list is always reset before (re)loading.
        self.sub_device_list = []
        if m.get('SubDeviceList') is not None:
            for raw in m.get('SubDeviceList'):
                self.sub_device_list.append(
                    GetScanSubDevicesResponseBodyDataSubDeviceList().from_map(raw)
                )
        return self
class GetScanSubDevicesResponseBody(TeaModel):
    """Top-level GetScanSubDevices response body: status fields plus data."""

    def __init__(
        self,
        code: str = None,
        data: GetScanSubDevicesResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        # Only the nested data model carries validation logic.
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize non-None fields to a wire-keyed dict."""
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        """Load fields from a wire-keyed dict and return self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = GetScanSubDevicesResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class GetScanSubDevicesResponse(TeaModel):
    """HTTP envelope for GetScanSubDevices: headers, status code and parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetScanSubDevicesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body
    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        entries = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', None if self.body is None else self.body.to_map()),
        )
        for wire_key, value in entries:
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetScanSubDevicesResponseBody().from_map(m['body'])
        return self
class GetUserDetailRequest(TeaModel):
    """Request parameters for the GetUserDetail API."""
    # (python attribute, wire key) pairs; every field is an optional scalar.
    _FIELDS = (
        ('corp_id', 'CorpId'),
        ('isv_sub_id', 'IsvSubId'),
        ('need_face_detail', 'NeedFaceDetail'),
        ('user_id', 'UserId'),
    )
    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
        need_face_detail: bool = None,
        user_id: int = None,
    ):
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id
        self.need_face_detail = need_face_detail
        self.user_id = user_id
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        for attr, wire_key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, wire_key in self._FIELDS:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetUserDetailResponseBodyData(TeaModel):
    """Flat user-detail payload returned by GetUserDetail (all optional scalars)."""
    # (python attribute, wire key) pairs, in wire-serialization order.
    _FIELDS = (
        ('address', 'Address'),
        ('age', 'Age'),
        ('attachment', 'Attachment'),
        ('biz_id', 'BizId'),
        ('face_image_url', 'FaceImageUrl'),
        ('gender', 'Gender'),
        ('id_number', 'IdNumber'),
        ('isv_sub_id', 'IsvSubId'),
        ('phone_no', 'PhoneNo'),
        ('plate_no', 'PlateNo'),
        ('user_group_id', 'UserGroupId'),
        ('user_id', 'UserId'),
        ('user_name', 'UserName'),
    )
    def __init__(
        self,
        address: str = None,
        age: str = None,
        attachment: str = None,
        biz_id: str = None,
        face_image_url: str = None,
        gender: str = None,
        id_number: str = None,
        isv_sub_id: str = None,
        phone_no: str = None,
        plate_no: str = None,
        user_group_id: int = None,
        user_id: int = None,
        user_name: str = None,
    ):
        self.address = address
        self.age = age
        self.attachment = attachment
        self.biz_id = biz_id
        self.face_image_url = face_image_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.user_group_id = user_group_id
        self.user_id = user_id
        self.user_name = user_name
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        for attr, wire_key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, wire_key in self._FIELDS:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetUserDetailResponseBody(TeaModel):
    """Top-level body of GetUserDetail: result code/message plus the user payload."""
    def __init__(
        self,
        code: str = None,
        data: GetUserDetailResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id
    def validate(self):
        # Only the nested payload carries validation rules.
        if self.data:
            self.data.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        entries = (
            ('Code', self.code),
            ('Data', None if self.data is None else self.data.to_map()),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        for wire_key, value in entries:
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        if m.get('Data') is not None:
            self.data = GetUserDetailResponseBodyData().from_map(m['Data'])
        return self
class GetUserDetailResponse(TeaModel):
    """HTTP envelope for GetUserDetail: headers, status code and parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetUserDetailResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body
    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        entries = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', None if self.body is None else self.body.to_map()),
        )
        for wire_key, value in entries:
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetUserDetailResponseBody().from_map(m['body'])
        return self
class GetVideoComposeResultRequest(TeaModel):
    """Request parameters for the GetVideoComposeResult API."""
    # (python attribute, wire key) pairs; every field is an optional scalar.
    _FIELDS = (
        ('corp_id', 'CorpId'),
        ('task_request_id', 'TaskRequestId'),
    )
    def __init__(
        self,
        corp_id: str = None,
        task_request_id: str = None,
    ):
        self.corp_id = corp_id
        self.task_request_id = task_request_id
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        for attr, wire_key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, wire_key in self._FIELDS:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetVideoComposeResultResponseBody(TeaModel):
    """Body of GetVideoComposeResult: result code/message, task status and video URL."""
    # (python attribute, wire key) pairs, in wire-serialization order.
    _FIELDS = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
        ('status', 'Status'),
        ('video_url', 'VideoUrl'),
    )
    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        status: str = None,
        video_url: str = None,
    ):
        self.code = code
        self.message = message
        self.request_id = request_id
        self.status = status
        self.video_url = video_url
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        for attr, wire_key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, wire_key in self._FIELDS:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetVideoComposeResultResponse(TeaModel):
    """HTTP envelope for GetVideoComposeResult: headers, status code and parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetVideoComposeResultResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body
    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        entries = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', None if self.body is None else self.body.to_map()),
        )
        for wire_key, value in entries:
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetVideoComposeResultResponseBody().from_map(m['body'])
        return self
class GetVideoSummaryTaskResultRequest(TeaModel):
    """Request parameters for the GetVideoSummaryTaskResult API."""
    # (python attribute, wire key) pairs; every field is an optional scalar.
    _FIELDS = (
        ('corp_id', 'CorpId'),
        ('task_id', 'TaskId'),
    )
    def __init__(
        self,
        corp_id: str = None,
        task_id: str = None,
    ):
        self.corp_id = corp_id
        self.task_id = task_id
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        for attr, wire_key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, wire_key in self._FIELDS:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetVideoSummaryTaskResultResponseBody(TeaModel):
    """Body of GetVideoSummaryTaskResult; 'data' is an opaque string payload."""
    # (python attribute, wire key) pairs, in wire-serialization order.
    _FIELDS = (
        ('code', 'Code'),
        ('data', 'Data'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )
    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        for attr, wire_key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, wire_key in self._FIELDS:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class GetVideoSummaryTaskResultResponse(TeaModel):
    """HTTP envelope for GetVideoSummaryTaskResult: headers, status code and parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: GetVideoSummaryTaskResultResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body
    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        entries = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', None if self.body is None else self.body.to_map()),
        )
        for wire_key, value in entries:
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = GetVideoSummaryTaskResultResponseBody().from_map(m['body'])
        return self
class InvokeMotorModelRequest(TeaModel):
    """Request parameters for the InvokeMotorModel API (picture by id/path/url)."""
    # (python attribute, wire key) pairs; every field is an optional scalar.
    _FIELDS = (
        ('corp_id', 'CorpId'),
        ('pic_id', 'PicId'),
        ('pic_path', 'PicPath'),
        ('pic_url', 'PicUrl'),
    )
    def __init__(
        self,
        corp_id: str = None,
        pic_id: str = None,
        pic_path: str = None,
        pic_url: str = None,
    ):
        self.corp_id = corp_id
        self.pic_id = pic_id
        self.pic_path = pic_path
        self.pic_url = pic_url
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        for attr, wire_key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, wire_key in self._FIELDS:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class InvokeMotorModelResponseBodyData(TeaModel):
    """Payload of InvokeMotorModel: a single 'StructList' string field."""
    def __init__(
        self,
        struct_list: str = None,
    ):
        self.struct_list = struct_list
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        # Emit the only field when it is set.
        if self.struct_list is None:
            return dict()
        return {'StructList': self.struct_list}
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('StructList') is not None:
            self.struct_list = m.get('StructList')
        return self
class InvokeMotorModelResponseBody(TeaModel):
    """Top-level body of InvokeMotorModel: result code/message plus model payload."""
    def __init__(
        self,
        code: str = None,
        data: InvokeMotorModelResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id
    def validate(self):
        # Only the nested payload carries validation rules.
        if self.data:
            self.data.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        entries = (
            ('Code', self.code),
            ('Data', None if self.data is None else self.data.to_map()),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        for wire_key, value in entries:
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        if m.get('Data') is not None:
            self.data = InvokeMotorModelResponseBodyData().from_map(m['Data'])
        return self
class InvokeMotorModelResponse(TeaModel):
    """HTTP envelope for InvokeMotorModel: headers, status code and parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: InvokeMotorModelResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body
    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        entries = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', None if self.body is None else self.body.to_map()),
        )
        for wire_key, value in entries:
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = InvokeMotorModelResponseBody().from_map(m['body'])
        return self
class ListAccessNumberRequest(TeaModel):
    """Request parameters for the ListAccessNumber API."""
    def __init__(
        self,
        corp_id_list: str = None,
    ):
        self.corp_id_list = corp_id_list
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        # Emit the only field when it is set.
        if self.corp_id_list is None:
            return dict()
        return {'CorpIdList': self.corp_id_list}
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('CorpIdList') is not None:
            self.corp_id_list = m.get('CorpIdList')
        return self
class ListAccessNumberResponseBodyData(TeaModel):
    """One statistics row of ListAccessNumber: item, count and percentage."""
    # (python attribute, wire key) pairs, in wire-serialization order.
    _FIELDS = (
        ('count', 'Count'),
        ('item', 'Item'),
        ('percent', 'Percent'),
    )
    def __init__(
        self,
        count: str = None,
        item: str = None,
        percent: str = None,
    ):
        self.count = count
        self.item = item
        self.percent = percent
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        for attr, wire_key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, wire_key in self._FIELDS:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class ListAccessNumberResponseBody(TeaModel):
    """Top-level body of ListAccessNumber: result code/message plus data rows."""
    def __init__(
        self,
        code: str = None,
        data: List[ListAccessNumberResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id
    def validate(self):
        # Recursively validate every non-empty data row.
        for row in (self.data or []):
            if row:
                row.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        if self.code is not None:
            mapped['Code'] = self.code
        # 'Data' is always present in the wire form (empty when unset).
        mapped['Data'] = []
        if self.data is not None:
            mapped['Data'] = [row.to_map() if row else None for row in self.data]
        if self.message is not None:
            mapped['Message'] = self.message
        if self.request_id is not None:
            mapped['RequestId'] = self.request_id
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        self.data = []
        raw_rows = m.get('Data')
        if raw_rows is not None:
            for row in raw_rows:
                self.data.append(ListAccessNumberResponseBodyData().from_map(row))
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ListAccessNumberResponse(TeaModel):
    """HTTP envelope for ListAccessNumber: headers, status code and parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListAccessNumberResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body
    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        entries = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', None if self.body is None else self.body.to_map()),
        )
        for wire_key, value in entries:
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListAccessNumberResponseBody().from_map(m['body'])
        return self
class ListAlgorithmNamesByDeviceIdsRequest(TeaModel):
    """Request parameters for the ListAlgorithmNamesByDeviceIds API."""
    def __init__(
        self,
        gb_ids: str = None,
    ):
        # Device ID set, comma-separated when there are multiple; at most 200.
        self.gb_ids = gb_ids
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        # Emit the only field when it is set.
        if self.gb_ids is None:
            return dict()
        return {'GbIds': self.gb_ids}
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('GbIds') is not None:
            self.gb_ids = m.get('GbIds')
        return self
class ListAlgorithmNamesByDeviceIdsResponseBodyData(TeaModel):
    """One row of ListAlgorithmNamesByDeviceIds: a device ID and its algorithm names."""
    # (python attribute, wire key) pairs, in wire-serialization order.
    _FIELDS = (
        # Names of the algorithms computed for this device ID.
        ('algorithm_names', 'AlgorithmNames'),
        # Device ID.
        ('gb_id', 'GbId'),
    )
    def __init__(
        self,
        algorithm_names: List[str] = None,
        gb_id: str = None,
    ):
        self.algorithm_names = algorithm_names
        self.gb_id = gb_id
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        for attr, wire_key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, wire_key in self._FIELDS:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class ListAlgorithmNamesByDeviceIdsResponseBody(TeaModel):
    """Top-level body of ListAlgorithmNamesByDeviceIds: status fields plus data rows."""
    def __init__(
        self,
        code: str = None,
        data: List[ListAlgorithmNamesByDeviceIdsResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        # Result code. 200: success; Failed: failure; Retry: on-demand streaming is
        # still establishing the pull connection, retry later.
        self.code = code
        self.data = data
        # Result message.
        self.message = message
        # Request ID.
        self.request_id = request_id
        self.success = success
    def validate(self):
        # Recursively validate every non-empty data row.
        for row in (self.data or []):
            if row:
                row.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        if self.code is not None:
            mapped['Code'] = self.code
        # 'Data' is always present in the wire form (empty when unset).
        mapped['Data'] = []
        if self.data is not None:
            mapped['Data'] = [row.to_map() if row else None for row in self.data]
        if self.message is not None:
            mapped['Message'] = self.message
        if self.request_id is not None:
            mapped['RequestId'] = self.request_id
        if self.success is not None:
            mapped['Success'] = self.success
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        self.data = []
        raw_rows = m.get('Data')
        if raw_rows is not None:
            for row in raw_rows:
                self.data.append(
                    ListAlgorithmNamesByDeviceIdsResponseBodyData().from_map(row)
                )
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Success') is not None:
            self.success = m.get('Success')
        return self
class ListAlgorithmNamesByDeviceIdsResponse(TeaModel):
    """HTTP envelope for ListAlgorithmNamesByDeviceIds: headers, status code and body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListAlgorithmNamesByDeviceIdsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body
    def validate(self):
        # All three envelope parts are mandatory; the body validates recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        entries = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', None if self.body is None else self.body.to_map()),
        )
        for wire_key, value in entries:
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListAlgorithmNamesByDeviceIdsResponseBody().from_map(m['body'])
        return self
class ListBodyAlgorithmResultsRequest(TeaModel):
    """Request parameters for the ListBodyAlgorithmResults API (paged time-range query)."""
    # (python attribute, wire key) pairs, in wire-serialization order.
    _FIELDS = (
        ('algorithm_type', 'AlgorithmType'),
        ('cap_style', 'CapStyle'),
        ('corp_id', 'CorpId'),
        ('data_source_id', 'DataSourceId'),
        ('end_time', 'EndTime'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('start_time', 'StartTime'),
    )
    def __init__(
        self,
        algorithm_type: str = None,
        cap_style: str = None,
        corp_id: str = None,
        data_source_id: str = None,
        end_time: str = None,
        page_number: str = None,
        page_size: str = None,
        start_time: str = None,
    ):
        self.algorithm_type = algorithm_type
        self.cap_style = cap_style
        self.corp_id = corp_id
        self.data_source_id = data_source_id
        self.end_time = end_time
        self.page_number = page_number
        self.page_size = page_size
        self.start_time = start_time
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        for attr, wire_key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, wire_key in self._FIELDS:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class ListBodyAlgorithmResultsResponseBodyDataRecords(TeaModel):
    """One body-algorithm record: attributes, bounding box and picture paths."""
    # (python attribute, wire key) pairs, in wire-serialization order.
    _FIELDS = (
        ('cap_style', 'CapStyle'),
        ('coat_color', 'CoatColor'),
        ('coat_length', 'CoatLength'),
        ('coat_style', 'CoatStyle'),
        ('corp_id', 'CorpId'),
        ('data_source_id', 'DataSourceId'),
        ('gender_code', 'GenderCode'),
        ('hair_style', 'HairStyle'),
        ('left_top_x', 'LeftTopX'),
        ('left_top_y', 'LeftTopY'),
        ('max_age', 'MaxAge'),
        ('min_age', 'MinAge'),
        ('person_id', 'PersonId'),
        ('pic_url_path', 'PicUrlPath'),
        ('right_bottom_x', 'RightBottomX'),
        ('right_bottom_y', 'RightBottomY'),
        ('shot_time', 'ShotTime'),
        ('source_id', 'SourceId'),
        ('target_pic_url_path', 'TargetPicUrlPath'),
        ('trousers_color', 'TrousersColor'),
        ('trousers_length', 'TrousersLength'),
        ('trousers_style', 'TrousersStyle'),
    )
    def __init__(
        self,
        cap_style: str = None,
        coat_color: str = None,
        coat_length: str = None,
        coat_style: str = None,
        corp_id: str = None,
        data_source_id: str = None,
        gender_code: str = None,
        hair_style: str = None,
        left_top_x: float = None,
        left_top_y: float = None,
        max_age: str = None,
        min_age: str = None,
        person_id: str = None,
        pic_url_path: str = None,
        right_bottom_x: float = None,
        right_bottom_y: float = None,
        shot_time: str = None,
        source_id: str = None,
        target_pic_url_path: str = None,
        trousers_color: str = None,
        trousers_length: str = None,
        trousers_style: str = None,
    ):
        self.cap_style = cap_style
        self.coat_color = coat_color
        self.coat_length = coat_length
        self.coat_style = coat_style
        self.corp_id = corp_id
        self.data_source_id = data_source_id
        self.gender_code = gender_code
        self.hair_style = hair_style
        self.left_top_x = left_top_x
        self.left_top_y = left_top_y
        self.max_age = max_age
        self.min_age = min_age
        self.person_id = person_id
        self.pic_url_path = pic_url_path
        self.right_bottom_x = right_bottom_x
        self.right_bottom_y = right_bottom_y
        self.shot_time = shot_time
        self.source_id = source_id
        self.target_pic_url_path = target_pic_url_path
        self.trousers_color = trousers_color
        self.trousers_length = trousers_length
        self.trousers_style = trousers_style
    def validate(self):
        pass
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        for attr, wire_key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, wire_key in self._FIELDS:
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        return self
class ListBodyAlgorithmResultsResponseBodyData(TeaModel):
    """Paged result set of ListBodyAlgorithmResults: paging info plus record rows."""
    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[ListBodyAlgorithmResultsResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page
    def validate(self):
        # Recursively validate every non-empty record.
        for record in (self.records or []):
            if record:
                record.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        if self.page_number is not None:
            mapped['PageNumber'] = self.page_number
        if self.page_size is not None:
            mapped['PageSize'] = self.page_size
        # 'Records' is always present in the wire form (empty when unset).
        mapped['Records'] = []
        if self.records is not None:
            mapped['Records'] = [
                record.to_map() if record else None for record in self.records
            ]
        if self.total_count is not None:
            mapped['TotalCount'] = self.total_count
        if self.total_page is not None:
            mapped['TotalPage'] = self.total_page
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        self.records = []
        raw_records = m.get('Records')
        if raw_records is not None:
            for record in raw_records:
                self.records.append(
                    ListBodyAlgorithmResultsResponseBodyDataRecords().from_map(record)
                )
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        return self
class ListBodyAlgorithmResultsResponseBody(TeaModel):
    """Top-level body of ListBodyAlgorithmResults: result code/message plus paged data."""
    def __init__(
        self,
        code: str = None,
        data: ListBodyAlgorithmResultsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id
    def validate(self):
        # Only the nested payload carries validation rules.
        if self.data:
            self.data.validate()
    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        mapped = dict()
        entries = (
            ('Code', self.code),
            ('Data', None if self.data is None else self.data.to_map()),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        for wire_key, value in entries:
            if value is not None:
                mapped[wire_key] = value
        return mapped
    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(wire_key) is not None:
                setattr(self, attr, m.get(wire_key))
        if m.get('Data') is not None:
            self.data = ListBodyAlgorithmResultsResponseBodyData().from_map(m['Data'])
        return self
class ListBodyAlgorithmResultsResponse(TeaModel):
    """HTTP-level envelope: response headers, status code, and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListBodyAlgorithmResultsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three envelope fields are required; the body validates recursively."""
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        envelope = dict()
        if self.headers is not None:
            envelope['headers'] = self.headers
        if self.status_code is not None:
            envelope['statusCode'] = self.status_code
        if self.body is not None:
            envelope['body'] = self.body.to_map()
        return envelope

    def from_map(self, m: dict = None):
        """Populate this envelope from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListBodyAlgorithmResultsResponseBody().from_map(m['body'])
        return self
class ListCorpGroupMetricsRequest(TeaModel):
    """Request parameters for the ListCorpGroupMetrics API."""

    # (attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('corp_id', 'CorpId'),
        ('device_group', 'DeviceGroup'),
        ('device_id', 'DeviceId'),
        ('end_time', 'EndTime'),
        ('group_id', 'GroupId'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('start_time', 'StartTime'),
        ('tag_code', 'TagCode'),
        ('user_group', 'UserGroup'),
    )

    def __init__(
        self,
        corp_id: str = None,
        device_group: str = None,
        device_id: str = None,
        end_time: str = None,
        group_id: str = None,
        page_number: str = None,
        page_size: str = None,
        start_time: str = None,
        tag_code: str = None,
        user_group: str = None,
    ):
        self.corp_id = corp_id
        self.device_group = device_group
        self.device_id = device_id
        self.end_time = end_time
        self.group_id = group_id
        self.page_number = page_number
        self.page_size = page_size
        self.start_time = start_time
        self.tag_code = tag_code
        self.user_group = user_group

    def validate(self):
        """No constraints are enforced on this request."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate fields present in the wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListCorpGroupMetricsResponseBodyData(TeaModel):
    """One corp-group metric record returned by ListCorpGroupMetrics."""

    # (attribute, wire key) pairs, in wire order.
    # NOTE: the 'PersonID' capitalization is the API's wire key — do not "fix" it.
    _FIELDS = (
        ('corp_group_id', 'CorpGroupId'),
        ('corp_id', 'CorpId'),
        ('date_id', 'DateId'),
        ('device_group_id', 'DeviceGroupId'),
        ('device_id', 'DeviceId'),
        ('person_id', 'PersonID'),
        ('tag_code', 'TagCode'),
        ('tag_metrics', 'TagMetrics'),
        ('tag_value', 'TagValue'),
        ('user_group_id', 'UserGroupId'),
    )

    def __init__(
        self,
        corp_group_id: str = None,
        corp_id: str = None,
        date_id: str = None,
        device_group_id: str = None,
        device_id: str = None,
        person_id: str = None,
        tag_code: str = None,
        tag_metrics: str = None,
        tag_value: str = None,
        user_group_id: str = None,
    ):
        self.corp_group_id = corp_group_id
        self.corp_id = corp_id
        self.date_id = date_id
        self.device_group_id = device_group_id
        self.device_id = device_id
        self.person_id = person_id
        self.tag_code = tag_code
        self.tag_metrics = tag_metrics
        self.tag_value = tag_value
        self.user_group_id = user_group_id

    def validate(self):
        """No constraints are enforced on this record."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate fields present in the wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListCorpGroupMetricsResponseBody(TeaModel):
    """Top-level payload of the ListCorpGroupMetrics API response."""

    def __init__(
        self,
        code: str = None,
        data: List[ListCorpGroupMetricsResponseBodyData] = None,
        message: str = None,
        page_number: int = None,
        page_size: int = None,
        request_id: str = None,
        success: str = None,
        total_count: int = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.page_number = page_number
        self.page_size = page_size
        self.request_id = request_id
        self.success = success
        self.total_count = total_count

    def validate(self):
        """Recursively validate every metric record, when present."""
        for record in self.data or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize into a wire-format dict; 'Data' is always emitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        serialized = dict()
        if self.code is not None:
            serialized['Code'] = self.code
        serialized['Data'] = [record.to_map() if record else None
                              for record in (self.data or [])]
        if self.message is not None:
            serialized['Message'] = self.message
        if self.page_number is not None:
            serialized['PageNumber'] = self.page_number
        if self.page_size is not None:
            serialized['PageSize'] = self.page_size
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        if self.success is not None:
            serialized['Success'] = self.success
        if self.total_count is not None:
            serialized['TotalCount'] = self.total_count
        return serialized

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        # 'data' is always reset, mirroring the generated deserialization contract.
        self.data = [
            ListCorpGroupMetricsResponseBodyData().from_map(item)
            for item in (m.get('Data') or [])
        ]
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Success') is not None:
            self.success = m.get('Success')
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class ListCorpGroupMetricsResponse(TeaModel):
    """HTTP-level envelope: response headers, status code, and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListCorpGroupMetricsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three envelope fields are required; the body validates recursively."""
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        envelope = dict()
        if self.headers is not None:
            envelope['headers'] = self.headers
        if self.status_code is not None:
            envelope['statusCode'] = self.status_code
        if self.body is not None:
            envelope['body'] = self.body.to_map()
        return envelope

    def from_map(self, m: dict = None):
        """Populate this envelope from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListCorpGroupMetricsResponseBody().from_map(m['body'])
        return self
class ListCorpGroupsRequest(TeaModel):
    """Request parameters for the ListCorpGroups API."""

    # (attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('corp_id', 'CorpId'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
    )

    def __init__(
        self,
        corp_id: str = None,
        page_number: int = None,
        page_size: int = None,
    ):
        self.corp_id = corp_id
        self.page_number = page_number
        self.page_size = page_size

    def validate(self):
        """No constraints are enforced on this request."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate fields present in the wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListCorpGroupsResponseBodyData(TeaModel):
    """One page of corp-group identifiers (plain strings, not nested models)."""

    # (attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('records', 'Records'),
        ('total_count', 'TotalCount'),
        ('total_page', 'TotalPage'),
    )

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[str] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        """No constraints are enforced on this model."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate fields present in the wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListCorpGroupsResponseBody(TeaModel):
    """Top-level payload of the ListCorpGroups API response."""

    def __init__(
        self,
        code: str = None,
        data: ListCorpGroupsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Delegate validation to the nested data model, when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize the non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        serialized = dict()
        if self.code is not None:
            serialized['Code'] = self.code
        if self.data is not None:
            serialized['Data'] = self.data.to_map()
        if self.message is not None:
            serialized['Message'] = self.message
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = ListCorpGroupsResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ListCorpGroupsResponse(TeaModel):
    """HTTP-level envelope: response headers, status code, and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListCorpGroupsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three envelope fields are required; the body validates recursively."""
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        envelope = dict()
        if self.headers is not None:
            envelope['headers'] = self.headers
        if self.status_code is not None:
            envelope['statusCode'] = self.status_code
        if self.body is not None:
            envelope['body'] = self.body.to_map()
        return envelope

    def from_map(self, m: dict = None):
        """Populate this envelope from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListCorpGroupsResponseBody().from_map(m['body'])
        return self
class ListCorpMetricsRequest(TeaModel):
    """Request parameters for the ListCorpMetrics API."""

    # (attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('corp_id', 'CorpId'),
        ('device_group_list', 'DeviceGroupList'),
        ('device_id_list', 'DeviceIdList'),
        ('end_time', 'EndTime'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('start_time', 'StartTime'),
        ('tag_code', 'TagCode'),
        ('user_group_list', 'UserGroupList'),
    )

    def __init__(
        self,
        corp_id: str = None,
        device_group_list: str = None,
        device_id_list: str = None,
        end_time: str = None,
        page_number: str = None,
        page_size: str = None,
        start_time: str = None,
        tag_code: str = None,
        user_group_list: str = None,
    ):
        self.corp_id = corp_id
        self.device_group_list = device_group_list
        self.device_id_list = device_id_list
        self.end_time = end_time
        self.page_number = page_number
        self.page_size = page_size
        self.start_time = start_time
        self.tag_code = tag_code
        self.user_group_list = user_group_list

    def validate(self):
        """No constraints are enforced on this request."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate fields present in the wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListCorpMetricsResponseBodyData(TeaModel):
    """One corp metric record returned by ListCorpMetrics."""

    # (attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('corp_id', 'CorpId'),
        ('date_id', 'DateId'),
        ('device_group_id', 'DeviceGroupId'),
        ('device_id', 'DeviceId'),
        ('person_id', 'PersonId'),
        ('tag_code', 'TagCode'),
        ('tag_metrics', 'TagMetrics'),
        ('tag_value', 'TagValue'),
        ('user_group_id', 'UserGroupId'),
    )

    def __init__(
        self,
        corp_id: str = None,
        date_id: str = None,
        device_group_id: str = None,
        device_id: str = None,
        person_id: str = None,
        tag_code: str = None,
        tag_metrics: str = None,
        tag_value: str = None,
        user_group_id: str = None,
    ):
        self.corp_id = corp_id
        self.date_id = date_id
        self.device_group_id = device_group_id
        self.device_id = device_id
        self.person_id = person_id
        self.tag_code = tag_code
        self.tag_metrics = tag_metrics
        self.tag_value = tag_value
        self.user_group_id = user_group_id

    def validate(self):
        """No constraints are enforced on this record."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate fields present in the wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListCorpMetricsResponseBody(TeaModel):
    """Top-level payload of the ListCorpMetrics API response."""

    def __init__(
        self,
        code: str = None,
        data: List[ListCorpMetricsResponseBodyData] = None,
        message: str = None,
        page_number: int = None,
        page_size: int = None,
        request_id: str = None,
        success: str = None,
        total_count: int = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.page_number = page_number
        self.page_size = page_size
        self.request_id = request_id
        self.success = success
        self.total_count = total_count

    def validate(self):
        """Recursively validate every metric record, when present."""
        for record in self.data or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize into a wire-format dict; 'Data' is always emitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        serialized = dict()
        if self.code is not None:
            serialized['Code'] = self.code
        serialized['Data'] = [record.to_map() if record else None
                              for record in (self.data or [])]
        if self.message is not None:
            serialized['Message'] = self.message
        if self.page_number is not None:
            serialized['PageNumber'] = self.page_number
        if self.page_size is not None:
            serialized['PageSize'] = self.page_size
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        if self.success is not None:
            serialized['Success'] = self.success
        if self.total_count is not None:
            serialized['TotalCount'] = self.total_count
        return serialized

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        # 'data' is always reset, mirroring the generated deserialization contract.
        self.data = [
            ListCorpMetricsResponseBodyData().from_map(item)
            for item in (m.get('Data') or [])
        ]
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Success') is not None:
            self.success = m.get('Success')
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class ListCorpMetricsResponse(TeaModel):
    """HTTP-level envelope: response headers, status code, and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListCorpMetricsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three envelope fields are required; the body validates recursively."""
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        envelope = dict()
        if self.headers is not None:
            envelope['headers'] = self.headers
        if self.status_code is not None:
            envelope['statusCode'] = self.status_code
        if self.body is not None:
            envelope['body'] = self.body.to_map()
        return envelope

    def from_map(self, m: dict = None):
        """Populate this envelope from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListCorpMetricsResponseBody().from_map(m['body'])
        return self
class ListCorpsRequest(TeaModel):
    """Request parameters for the ListCorps API."""

    # (attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('corp_name', 'CorpName'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
    )

    def __init__(
        self,
        corp_name: str = None,
        page_number: int = None,
        page_size: int = None,
    ):
        self.corp_name = corp_name
        # Page number.
        self.page_number = page_number
        # Page size.
        self.page_size = page_size

    def validate(self):
        """No constraints are enforced on this request."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate fields present in the wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListCorpsResponseBodyDataRecords(TeaModel):
    """One corporation record returned by ListCorps."""

    # (attribute, wire key) pairs, in wire order.
    # NOTE: 'YOfflineAcuUsed' is the API's wire key — do not "fix" the casing.
    _FIELDS = (
        ('acu_used', 'AcuUsed'),
        ('app_name', 'AppName'),
        ('corp_id', 'CorpId'),
        ('corp_name', 'CorpName'),
        ('create_date', 'CreateDate'),
        ('description', 'Description'),
        ('device_count', 'DeviceCount'),
        ('icon_path', 'IconPath'),
        ('isv_sub_id', 'IsvSubId'),
        ('parent_corp_id', 'ParentCorpId'),
        ('yoffline_acu_used', 'YOfflineAcuUsed'),
    )

    def __init__(
        self,
        acu_used: int = None,
        app_name: str = None,
        corp_id: str = None,
        corp_name: str = None,
        create_date: str = None,
        description: str = None,
        device_count: int = None,
        icon_path: str = None,
        isv_sub_id: str = None,
        parent_corp_id: str = None,
        yoffline_acu_used: float = None,
    ):
        self.acu_used = acu_used
        self.app_name = app_name
        self.corp_id = corp_id
        self.corp_name = corp_name
        self.create_date = create_date
        self.description = description
        self.device_count = device_count
        self.icon_path = icon_path
        self.isv_sub_id = isv_sub_id
        self.parent_corp_id = parent_corp_id
        self.yoffline_acu_used = yoffline_acu_used

    def validate(self):
        """No constraints are enforced on this record."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate fields present in the wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListCorpsResponseBodyData(TeaModel):
    """One page of corporation records plus pagination counters."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[ListCorpsResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        """Recursively validate every record, when present."""
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize into a wire-format dict; 'Records' is always emitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        serialized = dict()
        if self.page_number is not None:
            serialized['PageNumber'] = self.page_number
        if self.page_size is not None:
            serialized['PageSize'] = self.page_size
        serialized['Records'] = [record.to_map() if record else None
                                 for record in (self.records or [])]
        if self.total_count is not None:
            serialized['TotalCount'] = self.total_count
        if self.total_page is not None:
            serialized['TotalPage'] = self.total_page
        return serialized

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        # 'records' is always reset, mirroring the generated deserialization contract.
        self.records = [
            ListCorpsResponseBodyDataRecords().from_map(item)
            for item in (m.get('Records') or [])
        ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        return self
class ListCorpsResponseBody(TeaModel):
    """Top-level payload of the ListCorps API response."""

    def __init__(
        self,
        code: str = None,
        data: ListCorpsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Delegate validation to the nested data model, when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize the non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        serialized = dict()
        if self.code is not None:
            serialized['Code'] = self.code
        if self.data is not None:
            serialized['Data'] = self.data.to_map()
        if self.message is not None:
            serialized['Message'] = self.message
        if self.request_id is not None:
            serialized['RequestId'] = self.request_id
        return serialized

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = ListCorpsResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ListCorpsResponse(TeaModel):
    """HTTP-level envelope: response headers, status code, and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListCorpsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three envelope fields are required; the body validates recursively."""
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize the non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        envelope = dict()
        if self.headers is not None:
            envelope['headers'] = self.headers
        if self.status_code is not None:
            envelope['statusCode'] = self.status_code
        if self.body is not None:
            envelope['body'] = self.body.to_map()
        return envelope

    def from_map(self, m: dict = None):
        """Populate this envelope from a wire-format dict; returns self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListCorpsResponseBody().from_map(m['body'])
        return self
class ListDeviceGroupsRequest(TeaModel):
    """Request parameters for the ListDeviceGroups API."""

    # (attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('corp_id_list', 'CorpIdList'),
        ('data_source_type', 'DataSourceType'),
        ('device_code_list', 'DeviceCodeList'),
        ('group', 'Group'),
        ('is_page', 'IsPage'),
        ('name', 'Name'),
        ('page_num', 'PageNum'),
        ('page_size', 'PageSize'),
    )

    def __init__(
        self,
        corp_id_list: str = None,
        data_source_type: str = None,
        device_code_list: str = None,
        group: str = None,
        is_page: int = None,
        name: str = None,
        page_num: int = None,
        page_size: int = None,
    ):
        self.corp_id_list = corp_id_list
        self.data_source_type = data_source_type
        self.device_code_list = device_code_list
        self.group = group
        self.is_page = is_page
        self.name = name
        self.page_num = page_num
        self.page_size = page_size

    def validate(self):
        """No constraints are enforced on this request."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate fields present in the wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListDeviceGroupsResponseBodyDataList(TeaModel):
    """One device record returned by ListDeviceGroups."""

    # (attribute, wire key) pairs, in wire order.
    _FIELDS = (
        ('bit_rate', 'BitRate'),
        ('coding_format', 'CodingFormat'),
        ('data_source_type', 'DataSourceType'),
        ('device_code', 'DeviceCode'),
        ('device_compute_status', 'DeviceComputeStatus'),
        ('device_group', 'DeviceGroup'),
        ('device_name', 'DeviceName'),
        ('device_sn', 'DeviceSn'),
        ('device_status', 'DeviceStatus'),
        ('device_stream_status', 'DeviceStreamStatus'),
        ('install_address', 'InstallAddress'),
        ('region_id', 'RegionId'),
        ('region_name', 'RegionName'),
        ('resolving_power', 'ResolvingPower'),
        ('type', 'Type'),
    )

    def __init__(
        self,
        bit_rate: str = None,
        coding_format: str = None,
        data_source_type: str = None,
        device_code: str = None,
        device_compute_status: str = None,
        device_group: str = None,
        device_name: str = None,
        device_sn: str = None,
        device_status: str = None,
        device_stream_status: str = None,
        install_address: str = None,
        region_id: str = None,
        region_name: str = None,
        resolving_power: str = None,
        type: str = None,
    ):
        self.bit_rate = bit_rate
        self.coding_format = coding_format
        self.data_source_type = data_source_type
        self.device_code = device_code
        self.device_compute_status = device_compute_status
        self.device_group = device_group
        self.device_name = device_name
        self.device_sn = device_sn
        self.device_status = device_status
        self.device_stream_status = device_stream_status
        self.install_address = install_address
        self.region_id = region_id
        self.region_name = region_name
        self.resolving_power = resolving_power
        # NOTE: parameter/attribute name 'type' mirrors the wire key and shadows the builtin.
        self.type = type

    def validate(self):
        """No constraints are enforced on this record."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a wire-format dict."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Populate fields present in the wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListDeviceGroupsResponseBodyData(TeaModel):
    """One page of device records plus the total match count."""

    def __init__(
        self,
        list: List[ListDeviceGroupsResponseBodyDataList] = None,
        total_count: str = None,
    ):
        # NOTE: attribute name 'list' mirrors the wire key and shadows the builtin.
        self.list = list
        self.total_count = total_count

    def validate(self):
        """Recursively validate every device record, when present."""
        for record in self.list or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize into a wire-format dict; 'List' is always emitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        serialized = dict()
        serialized['List'] = [record.to_map() if record else None
                              for record in (self.list or [])]
        if self.total_count is not None:
            serialized['TotalCount'] = self.total_count
        return serialized

    def from_map(self, m: dict = None):
        """Populate this model from a wire-format dict; returns self."""
        m = m or dict()
        # 'list' is always reset, mirroring the generated deserialization contract.
        self.list = [
            ListDeviceGroupsResponseBodyDataList().from_map(item)
            for item in (m.get('List') or [])
        ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class ListDeviceGroupsResponseBody(TeaModel):
    """Top-level body for ListDeviceGroups: status fields plus paged data blocks."""

    def __init__(
        self,
        code: str = None,
        data: List[ListDeviceGroupsResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        for block in (self.data or []):
            if block:
                block.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        # 'Data' is always emitted (possibly empty) to match the wire format.
        result['Data'] = [block.to_map() if block else None for block in (self.data or [])]
        for key, value in (('Message', self.message), ('RequestId', self.request_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        self.data = [
            ListDeviceGroupsResponseBodyData().from_map(block)
            for block in (m.get('Data') or [])
        ]
        for key, attr in (('Message', 'message'), ('RequestId', 'request_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListDeviceGroupsResponse(TeaModel):
    """Complete ListDeviceGroups response: HTTP headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListDeviceGroupsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are required; the body is validated recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListDeviceGroupsResponseBody().from_map(m['body'])
        return self
class ListDevicesRequest(TeaModel):
    """Query parameters for the ListDevices API (paged device lookup)."""

    # (wire key, attribute) pairs shared by to_map/from_map, in wire order.
    _FIELD_MAP = (
        ('CorpId', 'corp_id'),
        ('DeviceName', 'device_name'),
        ('GbId', 'gb_id'),
        ('PageNumber', 'page_number'),
        ('PageSize', 'page_size'),
    )

    def __init__(
        self,
        corp_id: str = None,
        device_name: str = None,
        gb_id: str = None,
        page_number: int = None,
        page_size: int = None,
    ):
        self.corp_id = corp_id
        self.device_name = device_name
        self.gb_id = gb_id
        self.page_number = page_number
        self.page_size = page_size

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListDevicesResponseBodyDataRecords(TeaModel):
    """A single device record as returned by ListDevices."""

    # (wire key, attribute) pairs shared by to_map/from_map, in wire order.
    _FIELD_MAP = (
        ('AccessProtocolType', 'access_protocol_type'),
        ('BitRate', 'bit_rate'),
        ('CoverImageUrl', 'cover_image_url'),
        ('CreateTime', 'create_time'),
        ('DeviceAddress', 'device_address'),
        ('DeviceDirection', 'device_direction'),
        ('DeviceName', 'device_name'),
        ('DeviceSite', 'device_site'),
        ('DeviceType', 'device_type'),
        ('GbId', 'gb_id'),
        ('Latitude', 'latitude'),
        ('Longitude', 'longitude'),
        ('Resolution', 'resolution'),
        ('SipGBId', 'sip_gbid'),
        ('SipPassword', 'sip_password'),
        ('SipServerIp', 'sip_server_ip'),
        ('SipServerPort', 'sip_server_port'),
        ('Status', 'status'),
        ('Vendor', 'vendor'),
    )

    def __init__(
        self,
        access_protocol_type: str = None,
        bit_rate: str = None,
        cover_image_url: str = None,
        create_time: str = None,
        device_address: str = None,
        device_direction: str = None,
        device_name: str = None,
        device_site: str = None,
        device_type: str = None,
        gb_id: str = None,
        latitude: str = None,
        longitude: str = None,
        resolution: str = None,
        sip_gbid: str = None,
        sip_password: str = None,
        sip_server_ip: str = None,
        sip_server_port: str = None,
        status: int = None,
        vendor: str = None,
    ):
        self.access_protocol_type = access_protocol_type
        self.bit_rate = bit_rate
        self.cover_image_url = cover_image_url
        self.create_time = create_time
        self.device_address = device_address
        self.device_direction = device_direction
        self.device_name = device_name
        self.device_site = device_site
        self.device_type = device_type
        self.gb_id = gb_id
        self.latitude = latitude
        self.longitude = longitude
        self.resolution = resolution
        self.sip_gbid = sip_gbid
        self.sip_password = sip_password
        self.sip_server_ip = sip_server_ip
        self.sip_server_port = sip_server_port
        self.status = status
        self.vendor = vendor

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListDevicesResponseBodyData(TeaModel):
    """One page of device records plus paging counters."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[ListDevicesResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        for record in (self.records or []):
            if record:
                record.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('PageNumber', self.page_number), ('PageSize', self.page_size)):
            if value is not None:
                result[key] = value
        # 'Records' is always emitted (possibly empty) to match the wire format.
        result['Records'] = [r.to_map() if r else None for r in (self.records or [])]
        for key, value in (('TotalCount', self.total_count), ('TotalPage', self.total_page)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('PageNumber', 'page_number'), ('PageSize', 'page_size'),
                          ('TotalCount', 'total_count'), ('TotalPage', 'total_page')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.records = [
            ListDevicesResponseBodyDataRecords().from_map(r)
            for r in (m.get('Records') or [])
        ]
        return self
class ListDevicesResponseBody(TeaModel):
    """Body of ListDevices: status fields plus the paged data payload."""

    def __init__(
        self,
        code: str = None,
        data: ListDevicesResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Only the nested data model needs recursive validation.
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('Code', 'code'), ('Message', 'message'), ('RequestId', 'request_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('Data') is not None:
            self.data = ListDevicesResponseBodyData().from_map(m['Data'])
        return self
class ListDevicesResponse(TeaModel):
    """Complete ListDevices response: HTTP headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListDevicesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are required; the body is validated recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListDevicesResponseBody().from_map(m['body'])
        return self
class ListEventAlgorithmDetailsRequest(TeaModel):
    """Query parameters for the ListEventAlgorithmDetails API."""

    # (wire key, attribute) pairs shared by to_map/from_map, in wire order.
    _FIELD_MAP = (
        ('CorpId', 'corp_id'),
        ('DataSourceId', 'data_source_id'),
        ('EndTime', 'end_time'),
        ('EventType', 'event_type'),
        ('EventValue', 'event_value'),
        ('ExtendValue', 'extend_value'),
        ('PageNumber', 'page_number'),
        ('PageSize', 'page_size'),
        ('RecordId', 'record_id'),
        ('SourceId', 'source_id'),
        ('StartTime', 'start_time'),
    )

    def __init__(
        self,
        corp_id: str = None,
        data_source_id: str = None,
        end_time: str = None,
        event_type: str = None,
        event_value: str = None,
        extend_value: str = None,
        page_number: int = None,
        page_size: int = None,
        record_id: str = None,
        source_id: str = None,
        start_time: str = None,
    ):
        self.corp_id = corp_id
        self.data_source_id = data_source_id
        self.end_time = end_time
        self.event_type = event_type
        self.event_value = event_value
        self.extend_value = extend_value
        self.page_number = page_number
        self.page_size = page_size
        self.record_id = record_id
        self.source_id = source_id
        self.start_time = start_time

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListEventAlgorithmDetailsResponseBodyData(TeaModel):
    """A single event-algorithm detail record returned by ListEventAlgorithmDetails."""

    # (wire key, attribute) pairs shared by to_map/from_map, in wire order.
    _FIELD_MAP = (
        ('CorpId', 'corp_id'),
        ('DataSourceId', 'data_source_id'),
        ('EventType', 'event_type'),
        ('EventValue', 'event_value'),
        ('ExtendValue', 'extend_value'),
        ('ExtraExtendValue', 'extra_extend_value'),
        ('FaceCount', 'face_count'),
        ('LeftTopX', 'left_top_x'),
        ('LeftTopY', 'left_top_y'),
        ('PicUrlPath', 'pic_url_path'),
        ('PointX', 'point_x'),
        ('PointY', 'point_y'),
        ('RecordId', 'record_id'),
        ('RightBottomX', 'right_bottom_x'),
        ('RightBottomY', 'right_bottom_y'),
        ('ShotTime', 'shot_time'),
        ('SourceId', 'source_id'),
        ('TagCode', 'tag_code'),
        ('TagCodeReliability', 'tag_code_reliability'),
        ('TargetPicUrlPath', 'target_pic_url_path'),
        ('UuidCode', 'uuid_code'),
    )

    def __init__(
        self,
        corp_id: str = None,
        data_source_id: str = None,
        event_type: str = None,
        event_value: str = None,
        extend_value: str = None,
        extra_extend_value: str = None,
        face_count: str = None,
        left_top_x: str = None,
        left_top_y: str = None,
        pic_url_path: str = None,
        point_x: str = None,
        point_y: str = None,
        record_id: str = None,
        right_bottom_x: str = None,
        right_bottom_y: str = None,
        shot_time: str = None,
        source_id: str = None,
        tag_code: str = None,
        tag_code_reliability: str = None,
        target_pic_url_path: str = None,
        uuid_code: str = None,
    ):
        self.corp_id = corp_id
        self.data_source_id = data_source_id
        self.event_type = event_type
        self.event_value = event_value
        self.extend_value = extend_value
        self.extra_extend_value = extra_extend_value
        self.face_count = face_count
        self.left_top_x = left_top_x
        self.left_top_y = left_top_y
        self.pic_url_path = pic_url_path
        self.point_x = point_x
        self.point_y = point_y
        self.record_id = record_id
        self.right_bottom_x = right_bottom_x
        self.right_bottom_y = right_bottom_y
        self.shot_time = shot_time
        self.source_id = source_id
        self.tag_code = tag_code
        self.tag_code_reliability = tag_code_reliability
        self.target_pic_url_path = target_pic_url_path
        self.uuid_code = uuid_code

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListEventAlgorithmDetailsResponseBody(TeaModel):
    """Body of ListEventAlgorithmDetails: status fields, paging info and detail records."""

    def __init__(
        self,
        code: str = None,
        data: List[ListEventAlgorithmDetailsResponseBodyData] = None,
        message: str = None,
        page_number: int = None,
        page_size: int = None,
        request_id: str = None,
        success: str = None,
        total_count: int = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.page_number = page_number
        self.page_size = page_size
        self.request_id = request_id
        self.success = success
        self.total_count = total_count

    def validate(self):
        for record in (self.data or []):
            if record:
                record.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        # 'Data' is always emitted (possibly empty) to match the wire format.
        result['Data'] = [record.to_map() if record else None for record in (self.data or [])]
        for key, value in (
            ('Message', self.message),
            ('PageNumber', self.page_number),
            ('PageSize', self.page_size),
            ('RequestId', self.request_id),
            ('Success', self.success),
            ('TotalCount', self.total_count),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        self.data = [
            ListEventAlgorithmDetailsResponseBodyData().from_map(record)
            for record in (m.get('Data') or [])
        ]
        for key, attr in (
            ('Message', 'message'),
            ('PageNumber', 'page_number'),
            ('PageSize', 'page_size'),
            ('RequestId', 'request_id'),
            ('Success', 'success'),
            ('TotalCount', 'total_count'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListEventAlgorithmDetailsResponse(TeaModel):
    """Complete ListEventAlgorithmDetails response: HTTP headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListEventAlgorithmDetailsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are required; the body is validated recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListEventAlgorithmDetailsResponseBody().from_map(m['body'])
        return self
class ListEventAlgorithmResultsRequest(TeaModel):
    """Query parameters for the ListEventAlgorithmResults API."""

    # (wire key, attribute) pairs shared by to_map/from_map, in wire order.
    _FIELD_MAP = (
        ('CorpId', 'corp_id'),
        ('DataSourceId', 'data_source_id'),
        ('EndTime', 'end_time'),
        ('EventType', 'event_type'),
        ('ExtendValue', 'extend_value'),
        ('PageNumber', 'page_number'),
        ('PageSize', 'page_size'),
        ('StartTime', 'start_time'),
    )

    def __init__(
        self,
        corp_id: str = None,
        data_source_id: str = None,
        end_time: str = None,
        event_type: str = None,
        extend_value: str = None,
        page_number: str = None,
        page_size: str = None,
        start_time: str = None,
    ):
        self.corp_id = corp_id
        self.data_source_id = data_source_id
        self.end_time = end_time
        self.event_type = event_type
        self.extend_value = extend_value
        self.page_number = page_number
        self.page_size = page_size
        self.start_time = start_time

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListEventAlgorithmResultsResponseBodyDataRecords(TeaModel):
    """A single event-algorithm result record returned by ListEventAlgorithmResults."""

    # (wire key, attribute) pairs shared by to_map/from_map, in wire order.
    _FIELD_MAP = (
        ('CapStyle', 'cap_style'),
        ('CorpId', 'corp_id'),
        ('DataSourceId', 'data_source_id'),
        ('EventType', 'event_type'),
        ('ExtendValue', 'extend_value'),
        ('ExtendValueThree', 'extend_value_three'),
        ('ExtendValueTwo', 'extend_value_two'),
        ('FaceCount', 'face_count'),
        ('PicUrlPath', 'pic_url_path'),
        ('RecordId', 'record_id'),
        ('ShotTime', 'shot_time'),
        ('TagCode', 'tag_code'),
        ('TagCodeReliability', 'tag_code_reliability'),
        ('TargetPicUrlPath', 'target_pic_url_path'),
        ('UuidCode', 'uuid_code'),
    )

    def __init__(
        self,
        cap_style: str = None,
        corp_id: str = None,
        data_source_id: str = None,
        event_type: str = None,
        extend_value: str = None,
        extend_value_three: str = None,
        extend_value_two: str = None,
        face_count: str = None,
        pic_url_path: str = None,
        record_id: str = None,
        shot_time: str = None,
        tag_code: str = None,
        tag_code_reliability: str = None,
        target_pic_url_path: str = None,
        uuid_code: str = None,
    ):
        self.cap_style = cap_style
        self.corp_id = corp_id
        self.data_source_id = data_source_id
        self.event_type = event_type
        self.extend_value = extend_value
        self.extend_value_three = extend_value_three
        self.extend_value_two = extend_value_two
        self.face_count = face_count
        self.pic_url_path = pic_url_path
        self.record_id = record_id
        self.shot_time = shot_time
        self.tag_code = tag_code
        self.tag_code_reliability = tag_code_reliability
        self.target_pic_url_path = target_pic_url_path
        self.uuid_code = uuid_code

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListEventAlgorithmResultsResponseBodyData(TeaModel):
    """One page of event-algorithm result records plus paging counters."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[ListEventAlgorithmResultsResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        for record in (self.records or []):
            if record:
                record.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('PageNumber', self.page_number), ('PageSize', self.page_size)):
            if value is not None:
                result[key] = value
        # 'Records' is always emitted (possibly empty) to match the wire format.
        result['Records'] = [r.to_map() if r else None for r in (self.records or [])]
        for key, value in (('TotalCount', self.total_count), ('TotalPage', self.total_page)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('PageNumber', 'page_number'), ('PageSize', 'page_size'),
                          ('TotalCount', 'total_count'), ('TotalPage', 'total_page')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.records = [
            ListEventAlgorithmResultsResponseBodyDataRecords().from_map(r)
            for r in (m.get('Records') or [])
        ]
        return self
class ListEventAlgorithmResultsResponseBody(TeaModel):
    """Body of ListEventAlgorithmResults: status fields plus the paged data payload."""

    def __init__(
        self,
        code: str = None,
        data: ListEventAlgorithmResultsResponseBodyData = None,
        extend_value: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.extend_value = extend_value
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Only the nested data model needs recursive validation.
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('ExtendValue', self.extend_value),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('Code', 'code'),
            ('ExtendValue', 'extend_value'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('Data') is not None:
            self.data = ListEventAlgorithmResultsResponseBodyData().from_map(m['Data'])
        return self
class ListEventAlgorithmResultsResponse(TeaModel):
    """Complete ListEventAlgorithmResults response: HTTP headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListEventAlgorithmResultsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are required; the body is validated recursively.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListEventAlgorithmResultsResponseBody().from_map(m['body'])
        return self
class ListFaceAlgorithmResultsRequest(TeaModel):
    """Query parameters for the ListFaceAlgorithmResults API."""

    # (wire key, attribute) pairs shared by to_map/from_map, in wire order.
    _FIELD_MAP = (
        ('AlgorithmType', 'algorithm_type'),
        ('CorpId', 'corp_id'),
        ('DataSourceId', 'data_source_id'),
        ('EndTime', 'end_time'),
        ('PageNumber', 'page_number'),
        ('PageSize', 'page_size'),
        ('StartTime', 'start_time'),
    )

    def __init__(
        self,
        algorithm_type: str = None,
        corp_id: str = None,
        data_source_id: str = None,
        end_time: str = None,
        page_number: str = None,
        page_size: str = None,
        start_time: str = None,
    ):
        self.algorithm_type = algorithm_type
        self.corp_id = corp_id
        self.data_source_id = data_source_id
        self.end_time = end_time
        self.page_number = page_number
        self.page_size = page_size
        self.start_time = start_time

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._FIELD_MAP:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELD_MAP:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListFaceAlgorithmResultsResponseBodyDataRecords(TeaModel):
    """One detected-face record: bounding box, attributes and picture URLs."""

    # (attribute name, wire key), in serialization order
    _FIELDS = (
        ('cap_style', 'CapStyle'),
        ('corp_id', 'CorpId'),
        ('data_source_id', 'DataSourceId'),
        ('face_id', 'FaceId'),
        ('gender_code', 'GenderCode'),
        ('hair_style', 'HairStyle'),
        ('left_top_x', 'LeftTopX'),
        ('left_top_y', 'LeftTopY'),
        ('max_age', 'MaxAge'),
        ('min_age', 'MinAge'),
        ('pic_url_path', 'PicUrlPath'),
        ('right_bottom_x', 'RightBottomX'),
        ('right_bottom_y', 'RightBottomY'),
        ('shot_time', 'ShotTime'),
        ('source_id', 'SourceId'),
        ('target_pic_url_path', 'TargetPicUrlPath'),
    )

    def __init__(
        self,
        cap_style: str = None,
        corp_id: str = None,
        data_source_id: str = None,
        face_id: str = None,
        gender_code: str = None,
        hair_style: str = None,
        left_top_x: float = None,
        left_top_y: float = None,
        max_age: str = None,
        min_age: str = None,
        pic_url_path: str = None,
        right_bottom_x: float = None,
        right_bottom_y: float = None,
        shot_time: str = None,
        source_id: str = None,
        target_pic_url_path: str = None,
    ):
        args = locals()
        for attr, _ in self._FIELDS:
            setattr(self, attr, args[attr])

    def validate(self):
        """All fields are optional; nothing to enforce."""
        pass

    def to_map(self):
        """Serialize every set (non-None) field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Copy every present (non-None) wire key into its attribute; returns self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListFaceAlgorithmResultsResponseBodyData(TeaModel):
    """One page of face records together with the paging counters."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[ListFaceAlgorithmResultsResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        """Recursively validate each record in the page."""
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.page_number is not None:
            out['PageNumber'] = self.page_number
        if self.page_size is not None:
            out['PageSize'] = self.page_size
        # 'Records' is always emitted, even when there are none
        out['Records'] = [r.to_map() if r else None for r in (self.records or [])]
        if self.total_count is not None:
            out['TotalCount'] = self.total_count
        if self.total_page is not None:
            out['TotalPage'] = self.total_page
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        self.records = [
            ListFaceAlgorithmResultsResponseBodyDataRecords().from_map(item)
            for item in (m.get('Records') or [])
        ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        return self
class ListFaceAlgorithmResultsResponseBody(TeaModel):
    """Top-level API payload: result code/message, request id and the data page."""

    def __init__(
        self,
        code: str = None,
        data: ListFaceAlgorithmResultsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Delegate validation to the nested data model, when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = ListFaceAlgorithmResultsResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ListFaceAlgorithmResultsResponse(TeaModel):
    """HTTP-layer wrapper: response headers, status code and the parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListFaceAlgorithmResultsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # every part of a completed response is mandatory
        for value, name in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListFaceAlgorithmResultsResponseBody().from_map(m['body'])
        return self
class ListInstancesRequest(TeaModel):
    """Request parameters for a paged ListInstances query."""

    # (attribute name, wire key), in serialization order
    _FIELDS = (
        ('current_page', 'CurrentPage'),
        ('instance_name', 'InstanceName'),
        ('page_size', 'PageSize'),
        ('project_id', 'ProjectId'),
    )

    def __init__(
        self,
        current_page: int = None,
        instance_name: str = None,
        page_size: int = None,
        project_id: str = None,
    ):
        args = locals()
        for attr, _ in self._FIELDS:
            setattr(self, attr, args[attr])

    def validate(self):
        """All fields are optional; nothing to enforce."""
        pass

    def to_map(self):
        """Serialize every set (non-None) field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Copy every present (non-None) wire key into its attribute; returns self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListInstancesResponseBodyDataItemsAlgorithms(TeaModel):
    """Id/name pair describing one algorithm bound to an instance."""

    # (attribute name, wire key), in serialization order
    _FIELDS = (
        ('algorithm_id', 'AlgorithmId'),
        ('algorithm_name', 'AlgorithmName'),
    )

    def __init__(
        self,
        algorithm_id: str = None,
        algorithm_name: str = None,
    ):
        args = locals()
        for attr, _ in self._FIELDS:
            setattr(self, attr, args[attr])

    def validate(self):
        """All fields are optional; nothing to enforce."""
        pass

    def to_map(self):
        """Serialize every set (non-None) field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Copy every present (non-None) wire key into its attribute; returns self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListInstancesResponseBodyDataItems(TeaModel):
    """A single instance entry, including its bound algorithm list."""

    def __init__(
        self,
        acu_used: int = None,
        algorithms: List[ListInstancesResponseBodyDataItemsAlgorithms] = None,
        create_time: int = None,
        device_number: int = None,
        instance_id: str = None,
        instance_name: str = None,
        status: str = None,
    ):
        self.acu_used = acu_used
        self.algorithms = algorithms
        self.create_time = create_time
        self.device_number = device_number
        self.instance_id = instance_id
        self.instance_name = instance_name
        self.status = status

    def validate(self):
        """Recursively validate each nested algorithm entry."""
        for algorithm in self.algorithms or []:
            if algorithm:
                algorithm.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.acu_used is not None:
            out['AcuUsed'] = self.acu_used
        # 'Algorithms' is always emitted, even when there are none
        out['Algorithms'] = [a.to_map() if a else None for a in (self.algorithms or [])]
        if self.create_time is not None:
            out['CreateTime'] = self.create_time
        if self.device_number is not None:
            out['DeviceNumber'] = self.device_number
        if self.instance_id is not None:
            out['InstanceId'] = self.instance_id
        if self.instance_name is not None:
            out['InstanceName'] = self.instance_name
        if self.status is not None:
            out['Status'] = self.status
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('AcuUsed') is not None:
            self.acu_used = m.get('AcuUsed')
        self.algorithms = [
            ListInstancesResponseBodyDataItemsAlgorithms().from_map(item)
            for item in (m.get('Algorithms') or [])
        ]
        if m.get('CreateTime') is not None:
            self.create_time = m.get('CreateTime')
        if m.get('DeviceNumber') is not None:
            self.device_number = m.get('DeviceNumber')
        if m.get('InstanceId') is not None:
            self.instance_id = m.get('InstanceId')
        if m.get('InstanceName') is not None:
            self.instance_name = m.get('InstanceName')
        if m.get('Status') is not None:
            self.status = m.get('Status')
        return self
class ListInstancesResponseBodyData(TeaModel):
    """One page of instance items together with the paging counters."""

    def __init__(
        self,
        current_page: int = None,
        items: List[ListInstancesResponseBodyDataItems] = None,
        page_size: int = None,
        total_count: int = None,
    ):
        self.current_page = current_page
        self.items = items
        self.page_size = page_size
        self.total_count = total_count

    def validate(self):
        """Recursively validate each item in the page."""
        for item in self.items or []:
            if item:
                item.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.current_page is not None:
            out['CurrentPage'] = self.current_page
        # 'Items' is always emitted, even when there are none
        out['Items'] = [i.to_map() if i else None for i in (self.items or [])]
        if self.page_size is not None:
            out['PageSize'] = self.page_size
        if self.total_count is not None:
            out['TotalCount'] = self.total_count
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('CurrentPage') is not None:
            self.current_page = m.get('CurrentPage')
        self.items = [
            ListInstancesResponseBodyDataItems().from_map(item)
            for item in (m.get('Items') or [])
        ]
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class ListInstancesResponseBody(TeaModel):
    """Top-level API payload: code/message/success flag plus the data page."""

    def __init__(
        self,
        code: str = None,
        data: ListInstancesResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id
        self.success = success

    def validate(self):
        """Delegate validation to the nested data model, when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        if self.success is not None:
            out['Success'] = self.success
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = ListInstancesResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Success') is not None:
            self.success = m.get('Success')
        return self
class ListInstancesResponse(TeaModel):
    """HTTP-layer wrapper: response headers, status code and the parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListInstancesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # every part of a completed response is mandatory
        for value, name in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListInstancesResponseBody().from_map(m['body'])
        return self
class ListMetricsRequest(TeaModel):
    """Request parameters for a paged, time-bounded ListMetrics query."""

    # (attribute name, wire key), in serialization order
    _FIELDS = (
        ('aggregate_type', 'AggregateType'),
        ('corp_id', 'CorpId'),
        ('end_time', 'EndTime'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('start_time', 'StartTime'),
        ('tag_code', 'TagCode'),
    )

    def __init__(
        self,
        aggregate_type: str = None,
        corp_id: str = None,
        end_time: str = None,
        page_number: str = None,
        page_size: str = None,
        start_time: str = None,
        tag_code: str = None,
    ):
        args = locals()
        for attr, _ in self._FIELDS:
            setattr(self, attr, args[attr])

    def validate(self):
        """All fields are optional; nothing to enforce."""
        pass

    def to_map(self):
        """Serialize every set (non-None) field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Copy every present (non-None) wire key into its attribute; returns self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListMetricsResponseBodyDataRecords(TeaModel):
    """A single metric data point (tag code/metric/value at a timestamp)."""

    # (attribute name, wire key), in serialization order
    _FIELDS = (
        ('date_time', 'DateTime'),
        ('tag_code', 'TagCode'),
        ('tag_metric', 'TagMetric'),
        ('tag_value', 'TagValue'),
    )

    def __init__(
        self,
        date_time: str = None,
        tag_code: str = None,
        tag_metric: str = None,
        tag_value: str = None,
    ):
        args = locals()
        for attr, _ in self._FIELDS:
            setattr(self, attr, args[attr])

    def validate(self):
        """All fields are optional; nothing to enforce."""
        pass

    def to_map(self):
        """Serialize every set (non-None) field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Copy every present (non-None) wire key into its attribute; returns self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListMetricsResponseBodyData(TeaModel):
    """One page of metric records together with the paging counters."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[ListMetricsResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        """Recursively validate each record in the page."""
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.page_number is not None:
            out['PageNumber'] = self.page_number
        if self.page_size is not None:
            out['PageSize'] = self.page_size
        # 'Records' is always emitted, even when there are none
        out['Records'] = [r.to_map() if r else None for r in (self.records or [])]
        if self.total_count is not None:
            out['TotalCount'] = self.total_count
        if self.total_page is not None:
            out['TotalPage'] = self.total_page
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        self.records = [
            ListMetricsResponseBodyDataRecords().from_map(item)
            for item in (m.get('Records') or [])
        ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        return self
class ListMetricsResponseBody(TeaModel):
    """Top-level API payload: result code/message, request id and the data page."""

    def __init__(
        self,
        code: str = None,
        data: ListMetricsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Delegate validation to the nested data model, when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = ListMetricsResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ListMetricsResponse(TeaModel):
    """HTTP-layer wrapper: response headers, status code and the parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListMetricsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # every part of a completed response is mandatory
        for value, name in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListMetricsResponseBody().from_map(m['body'])
        return self
class ListMotorAlgorithmResultsRequest(TeaModel):
    """Request parameters for a paged ListMotorAlgorithmResults query."""

    # (attribute name, wire key), in serialization order
    _FIELDS = (
        ('algorithm_type', 'AlgorithmType'),
        ('corp_id', 'CorpId'),
        ('data_source_id', 'DataSourceId'),
        ('end_time', 'EndTime'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('plate_number', 'PlateNumber'),
        ('start_time', 'StartTime'),
    )

    def __init__(
        self,
        algorithm_type: str = None,
        corp_id: str = None,
        data_source_id: str = None,
        end_time: str = None,
        page_number: str = None,
        page_size: str = None,
        plate_number: str = None,
        start_time: str = None,
    ):
        args = locals()
        for attr, _ in self._FIELDS:
            setattr(self, attr, args[attr])

    def validate(self):
        """All fields are optional; nothing to enforce."""
        pass

    def to_map(self):
        """Serialize every set (non-None) field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Copy every present (non-None) wire key into its attribute; returns self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListMotorAlgorithmResultsResponseBodyDataRecords(TeaModel):
    """One detected-vehicle record: bounding box, vehicle/plate attributes and picture URLs."""

    # (attribute name, wire key), in serialization order
    _FIELDS = (
        ('calling', 'Calling'),
        ('corp_id', 'CorpId'),
        ('data_source_id', 'DataSourceId'),
        ('left_top_x', 'LeftTopX'),
        ('left_top_y', 'LeftTopY'),
        ('motor_brand', 'MotorBrand'),
        ('motor_class', 'MotorClass'),
        ('motor_color', 'MotorColor'),
        ('motor_id', 'MotorId'),
        ('motor_model', 'MotorModel'),
        ('motor_style', 'MotorStyle'),
        ('pic_url_path', 'PicUrlPath'),
        ('plate_class', 'PlateClass'),
        ('plate_color', 'PlateColor'),
        ('plate_number', 'PlateNumber'),
        ('right_bottom_x', 'RightBottomX'),
        ('right_bottom_y', 'RightBottomY'),
        ('safety_belt', 'SafetyBelt'),
        ('shot_time', 'ShotTime'),
        ('source_id', 'SourceId'),
        ('target_pic_url_path', 'TargetPicUrlPath'),
    )

    def __init__(
        self,
        calling: str = None,
        corp_id: str = None,
        data_source_id: str = None,
        left_top_x: float = None,
        left_top_y: float = None,
        motor_brand: str = None,
        motor_class: str = None,
        motor_color: str = None,
        motor_id: str = None,
        motor_model: str = None,
        motor_style: str = None,
        pic_url_path: str = None,
        plate_class: str = None,
        plate_color: str = None,
        plate_number: str = None,
        right_bottom_x: float = None,
        right_bottom_y: float = None,
        safety_belt: str = None,
        shot_time: str = None,
        source_id: str = None,
        target_pic_url_path: str = None,
    ):
        args = locals()
        for attr, _ in self._FIELDS:
            setattr(self, attr, args[attr])

    def validate(self):
        """All fields are optional; nothing to enforce."""
        pass

    def to_map(self):
        """Serialize every set (non-None) field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Copy every present (non-None) wire key into its attribute; returns self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListMotorAlgorithmResultsResponseBodyData(TeaModel):
    """One page of vehicle records together with the paging counters."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[ListMotorAlgorithmResultsResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        """Recursively validate each record in the page."""
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.page_number is not None:
            out['PageNumber'] = self.page_number
        if self.page_size is not None:
            out['PageSize'] = self.page_size
        # 'Records' is always emitted, even when there are none
        out['Records'] = [r.to_map() if r else None for r in (self.records or [])]
        if self.total_count is not None:
            out['TotalCount'] = self.total_count
        if self.total_page is not None:
            out['TotalPage'] = self.total_page
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        self.records = [
            ListMotorAlgorithmResultsResponseBodyDataRecords().from_map(item)
            for item in (m.get('Records') or [])
        ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        return self
class ListMotorAlgorithmResultsResponseBody(TeaModel):
    """Top-level API payload: result code/message, request id and the data page."""

    def __init__(
        self,
        code: str = None,
        data: ListMotorAlgorithmResultsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Delegate validation to the nested data model, when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = ListMotorAlgorithmResultsResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ListMotorAlgorithmResultsResponse(TeaModel):
    """HTTP-layer wrapper: response headers, status code and the parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListMotorAlgorithmResultsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # every part of a completed response is mandatory
        for value, name in (
            (self.headers, 'headers'),
            (self.status_code, 'status_code'),
            (self.body, 'body'),
        ):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = {}
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListMotorAlgorithmResultsResponseBody().from_map(m['body'])
        return self
class ListNVRChannelDeviceRequest(TeaModel):
    """Request parameters for a (optionally paged) ListNVRChannelDevice query."""

    # (attribute name, wire key), in serialization order
    _FIELDS = (
        ('device_code', 'DeviceCode'),
        ('is_page', 'IsPage'),
        ('page_num', 'PageNum'),
        ('page_size', 'PageSize'),
    )

    def __init__(
        self,
        device_code: str = None,
        is_page: str = None,
        page_num: str = None,
        page_size: str = None,
    ):
        args = locals()
        for attr, _ in self._FIELDS:
            setattr(self, attr, args[attr])

    def validate(self):
        """All fields are optional; nothing to enforce."""
        pass

    def to_map(self):
        """Serialize every set (non-None) field under its wire key."""
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for attr, key in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        """Copy every present (non-None) wire key into its attribute; returns self."""
        m = m or {}
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListNVRChannelDeviceResponseBodyData(TeaModel):
    """One device record returned by ListNVRChannelDevice."""

    def __init__(
        self,
        compture_status: str = None,
        corp_id: str = None,
        datasource_type: str = None,
        device_code: str = None,
        device_name: str = None,
        device_sn: str = None,
        device_status: str = None,
        device_type: str = None,
        project_name: str = None,
        region_name: str = None,
        sample_name: str = None,
        stream_status: str = None,
        vap: str = None,
    ):
        self.compture_status = compture_status
        self.corp_id = corp_id
        self.datasource_type = datasource_type
        self.device_code = device_code
        self.device_name = device_name
        self.device_sn = device_sn
        self.device_status = device_status
        self.device_type = device_type
        self.project_name = project_name
        self.region_name = region_name
        self.sample_name = sample_name
        self.stream_status = stream_status
        self.vap = vap

    # Attribute <-> wire-key pairs, in serialization order.
    # Note: 'ComptureStatus' is the literal key used by the service.
    _PAIRS = (
        ('compture_status', 'ComptureStatus'),
        ('corp_id', 'CorpId'),
        ('datasource_type', 'DatasourceType'),
        ('device_code', 'DeviceCode'),
        ('device_name', 'DeviceName'),
        ('device_sn', 'DeviceSn'),
        ('device_status', 'DeviceStatus'),
        ('device_type', 'DeviceType'),
        ('project_name', 'ProjectName'),
        ('region_name', 'RegionName'),
        ('sample_name', 'SampleName'),
        ('stream_status', 'StreamStatus'),
        ('vap', 'Vap'),
    )

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        for attr, wire_key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        for attr, wire_key in self._PAIRS:
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListNVRChannelDeviceResponseBody(TeaModel):
    """Response body for ListNVRChannelDevice: result rows plus paging/status metadata."""

    def __init__(
        self,
        data: List[ListNVRChannelDeviceResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
        total: str = None,
    ):
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.success = success
        self.total = total

    def validate(self):
        if self.data:
            for item in self.data:
                if item:
                    item.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        # 'Data' is always emitted, even when empty or unset.
        serialized['Data'] = []
        if self.data is not None:
            for item in self.data:
                serialized['Data'].append(item.to_map() if item else None)
        for attr, wire_key in (
            ('message', 'Message'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
            ('total', 'Total'),
        ):
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        # Rebuild the row list from scratch on every call.
        self.data = []
        if source.get('Data') is not None:
            for entry in source.get('Data'):
                self.data.append(ListNVRChannelDeviceResponseBodyData().from_map(entry))
        for attr, wire_key in (
            ('message', 'Message'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
            ('total', 'Total'),
        ):
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListNVRChannelDeviceResponse(TeaModel):
    """Full API response for ListNVRChannelDevice: HTTP headers, status code, and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListNVRChannelDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is required once the call has completed.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.status_code is not None:
            serialized['statusCode'] = self.status_code
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = ListNVRChannelDeviceResponseBody().from_map(source['body'])
        return self
class ListNVRDeviceRequest(TeaModel):
    """Request model for the ListNVRDevice API."""

    def __init__(
        self,
        corp_id_list: str = None,
        device_code: str = None,
        is_page: int = None,
        page_num: int = None,
        page_size: int = None,
    ):
        self.corp_id_list = corp_id_list
        self.device_code = device_code
        self.is_page = is_page
        self.page_num = page_num
        self.page_size = page_size

    # Attribute <-> wire-key pairs, in serialization order.
    _PAIRS = (
        ('corp_id_list', 'CorpIdList'),
        ('device_code', 'DeviceCode'),
        ('is_page', 'IsPage'),
        ('page_num', 'PageNum'),
        ('page_size', 'PageSize'),
    )

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        for attr, wire_key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        for attr, wire_key in self._PAIRS:
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListNVRDeviceResponseBodyData(TeaModel):
    """One NVR device record returned by ListNVRDevice."""

    def __init__(
        self,
        access_quota: str = None,
        channel: str = None,
        compture_status: str = None,
        corp_id: str = None,
        datasource_type: str = None,
        device_code: str = None,
        device_name: str = None,
        device_sn: str = None,
        device_status: str = None,
        device_type: str = None,
        project_name: str = None,
        region_name: str = None,
        registration_time: str = None,
        stream_status: str = None,
        type: str = None,
    ):
        self.access_quota = access_quota
        self.channel = channel
        self.compture_status = compture_status
        self.corp_id = corp_id
        self.datasource_type = datasource_type
        self.device_code = device_code
        self.device_name = device_name
        self.device_sn = device_sn
        self.device_status = device_status
        self.device_type = device_type
        self.project_name = project_name
        self.region_name = region_name
        self.registration_time = registration_time
        self.stream_status = stream_status
        self.type = type

    # Attribute <-> wire-key pairs, in serialization order.
    # Note: 'ComptureStatus' is the literal key used by the service.
    _PAIRS = (
        ('access_quota', 'AccessQuota'),
        ('channel', 'Channel'),
        ('compture_status', 'ComptureStatus'),
        ('corp_id', 'CorpId'),
        ('datasource_type', 'DatasourceType'),
        ('device_code', 'DeviceCode'),
        ('device_name', 'DeviceName'),
        ('device_sn', 'DeviceSn'),
        ('device_status', 'DeviceStatus'),
        ('device_type', 'DeviceType'),
        ('project_name', 'ProjectName'),
        ('region_name', 'RegionName'),
        ('registration_time', 'RegistrationTime'),
        ('stream_status', 'StreamStatus'),
        ('type', 'Type'),
    )

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        for attr, wire_key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        for attr, wire_key in self._PAIRS:
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListNVRDeviceResponseBody(TeaModel):
    """Response body for ListNVRDevice: result rows plus status metadata."""

    def __init__(
        self,
        data: List[ListNVRDeviceResponseBodyData] = None,
        request_id: str = None,
        success: bool = None,
        total: str = None,
    ):
        self.data = data
        # Id of the request
        self.request_id = request_id
        self.success = success
        self.total = total

    def validate(self):
        if self.data:
            for item in self.data:
                if item:
                    item.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        # 'Data' is always emitted, even when empty or unset.
        serialized['Data'] = []
        if self.data is not None:
            for item in self.data:
                serialized['Data'].append(item.to_map() if item else None)
        for attr, wire_key in (
            ('request_id', 'RequestId'),
            ('success', 'Success'),
            ('total', 'Total'),
        ):
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        # Rebuild the row list from scratch on every call.
        self.data = []
        if source.get('Data') is not None:
            for entry in source.get('Data'):
                self.data.append(ListNVRDeviceResponseBodyData().from_map(entry))
        for attr, wire_key in (
            ('request_id', 'RequestId'),
            ('success', 'Success'),
            ('total', 'Total'),
        ):
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListNVRDeviceResponse(TeaModel):
    """Full API response for ListNVRDevice: HTTP headers, status code, and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListNVRDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is required once the call has completed.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.status_code is not None:
            serialized['statusCode'] = self.status_code
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = ListNVRDeviceResponseBody().from_map(source['body'])
        return self
class ListPersonTraceRequest(TeaModel):
    """Request model for the ListPersonTrace API."""

    def __init__(
        self,
        corp_id: str = None,
        data_source_id: str = None,
        end_time: str = None,
        group_id: str = None,
        page_number: str = None,
        page_size: str = None,
        person_id: str = None,
        start_time: str = None,
    ):
        self.corp_id = corp_id
        self.data_source_id = data_source_id
        self.end_time = end_time
        self.group_id = group_id
        self.page_number = page_number
        self.page_size = page_size
        self.person_id = person_id
        self.start_time = start_time

    # Attribute <-> wire-key pairs, in serialization order.
    _PAIRS = (
        ('corp_id', 'CorpId'),
        ('data_source_id', 'DataSourceId'),
        ('end_time', 'EndTime'),
        ('group_id', 'GroupId'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('person_id', 'PersonId'),
        ('start_time', 'StartTime'),
    )

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        for attr, wire_key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        for attr, wire_key in self._PAIRS:
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListPersonTraceResponseBodyData(TeaModel):
    """One trace record returned by ListPersonTrace."""

    def __init__(
        self,
        corp_id: str = None,
        date: str = None,
        device_id: str = None,
        end_source_image: str = None,
        end_target_image: str = None,
        group_id: str = None,
        last_time: str = None,
        person_id: str = None,
        start_source_image: str = None,
        start_target_image: str = None,
        start_time: str = None,
    ):
        self.corp_id = corp_id
        self.date = date
        self.device_id = device_id
        self.end_source_image = end_source_image
        self.end_target_image = end_target_image
        self.group_id = group_id
        self.last_time = last_time
        self.person_id = person_id
        self.start_source_image = start_source_image
        self.start_target_image = start_target_image
        self.start_time = start_time

    # Attribute <-> wire-key pairs, in serialization order.
    _PAIRS = (
        ('corp_id', 'CorpId'),
        ('date', 'Date'),
        ('device_id', 'DeviceId'),
        ('end_source_image', 'EndSourceImage'),
        ('end_target_image', 'EndTargetImage'),
        ('group_id', 'GroupId'),
        ('last_time', 'LastTime'),
        ('person_id', 'PersonId'),
        ('start_source_image', 'StartSourceImage'),
        ('start_target_image', 'StartTargetImage'),
        ('start_time', 'StartTime'),
    )

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        for attr, wire_key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        for attr, wire_key in self._PAIRS:
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListPersonTraceResponseBody(TeaModel):
    """Response body for ListPersonTrace: result rows plus paging/status metadata."""

    def __init__(
        self,
        code: str = None,
        data: List[ListPersonTraceResponseBodyData] = None,
        message: str = None,
        page_number: int = None,
        page_size: int = None,
        request_id: str = None,
        success: str = None,
        total_count: int = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.page_number = page_number
        self.page_size = page_size
        self.request_id = request_id
        self.success = success
        self.total_count = total_count

    def validate(self):
        if self.data:
            for item in self.data:
                if item:
                    item.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        # 'Code' precedes 'Data' in the wire order; 'Data' is always emitted.
        if self.code is not None:
            serialized['Code'] = self.code
        serialized['Data'] = []
        if self.data is not None:
            for item in self.data:
                serialized['Data'].append(item.to_map() if item else None)
        for attr, wire_key in (
            ('message', 'Message'),
            ('page_number', 'PageNumber'),
            ('page_size', 'PageSize'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
            ('total_count', 'TotalCount'),
        ):
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('Code') is not None:
            self.code = source.get('Code')
        # Rebuild the row list from scratch on every call.
        self.data = []
        if source.get('Data') is not None:
            for entry in source.get('Data'):
                self.data.append(ListPersonTraceResponseBodyData().from_map(entry))
        for attr, wire_key in (
            ('message', 'Message'),
            ('page_number', 'PageNumber'),
            ('page_size', 'PageSize'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
            ('total_count', 'TotalCount'),
        ):
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListPersonTraceResponse(TeaModel):
    """Full API response for ListPersonTrace: HTTP headers, status code, and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListPersonTraceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is required once the call has completed.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.status_code is not None:
            serialized['statusCode'] = self.status_code
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = ListPersonTraceResponseBody().from_map(source['body'])
        return self
class ListPersonTraceDetailsRequest(TeaModel):
    """Request model for the ListPersonTraceDetails API."""

    def __init__(
        self,
        corp_id: str = None,
        data_source_id: str = None,
        end_time: str = None,
        page_number: int = None,
        page_size: int = None,
        person_id: str = None,
        start_time: str = None,
        sub_id: str = None,
    ):
        self.corp_id = corp_id
        self.data_source_id = data_source_id
        self.end_time = end_time
        self.page_number = page_number
        self.page_size = page_size
        self.person_id = person_id
        self.start_time = start_time
        self.sub_id = sub_id

    # Attribute <-> wire-key pairs, in serialization order.
    _PAIRS = (
        ('corp_id', 'CorpId'),
        ('data_source_id', 'DataSourceId'),
        ('end_time', 'EndTime'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('person_id', 'PersonId'),
        ('start_time', 'StartTime'),
        ('sub_id', 'SubId'),
    )

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        for attr, wire_key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        for attr, wire_key in self._PAIRS:
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListPersonTraceDetailsResponseBodyData(TeaModel):
    """One trace-detail record returned by ListPersonTraceDetails."""

    def __init__(
        self,
        corp_id: str = None,
        data_source_id: str = None,
        left_top_x: str = None,
        left_top_y: str = None,
        person_id: str = None,
        pic_url_path: str = None,
        right_bottom_x: str = None,
        right_bottom_y: str = None,
        shot_time: str = None,
        sub_id: str = None,
        target_pic_url_path: str = None,
    ):
        self.corp_id = corp_id
        self.data_source_id = data_source_id
        self.left_top_x = left_top_x
        self.left_top_y = left_top_y
        self.person_id = person_id
        self.pic_url_path = pic_url_path
        self.right_bottom_x = right_bottom_x
        self.right_bottom_y = right_bottom_y
        self.shot_time = shot_time
        self.sub_id = sub_id
        self.target_pic_url_path = target_pic_url_path

    # Attribute <-> wire-key pairs, in serialization order.
    _PAIRS = (
        ('corp_id', 'CorpId'),
        ('data_source_id', 'DataSourceId'),
        ('left_top_x', 'LeftTopX'),
        ('left_top_y', 'LeftTopY'),
        ('person_id', 'PersonId'),
        ('pic_url_path', 'PicUrlPath'),
        ('right_bottom_x', 'RightBottomX'),
        ('right_bottom_y', 'RightBottomY'),
        ('shot_time', 'ShotTime'),
        ('sub_id', 'SubId'),
        ('target_pic_url_path', 'TargetPicUrlPath'),
    )

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        for attr, wire_key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        for attr, wire_key in self._PAIRS:
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListPersonTraceDetailsResponseBody(TeaModel):
    """Response body for ListPersonTraceDetails: result rows plus paging/status metadata."""

    def __init__(
        self,
        code: str = None,
        data: List[ListPersonTraceDetailsResponseBodyData] = None,
        message: str = None,
        page_number: int = None,
        page_size: int = None,
        request_id: str = None,
        total_count: int = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.page_number = page_number
        self.page_size = page_size
        self.request_id = request_id
        self.total_count = total_count

    def validate(self):
        if self.data:
            for item in self.data:
                if item:
                    item.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        # 'Code' precedes 'Data' in the wire order; 'Data' is always emitted.
        if self.code is not None:
            serialized['Code'] = self.code
        serialized['Data'] = []
        if self.data is not None:
            for item in self.data:
                serialized['Data'].append(item.to_map() if item else None)
        for attr, wire_key in (
            ('message', 'Message'),
            ('page_number', 'PageNumber'),
            ('page_size', 'PageSize'),
            ('request_id', 'RequestId'),
            ('total_count', 'TotalCount'),
        ):
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('Code') is not None:
            self.code = source.get('Code')
        # Rebuild the row list from scratch on every call.
        self.data = []
        if source.get('Data') is not None:
            for entry in source.get('Data'):
                self.data.append(ListPersonTraceDetailsResponseBodyData().from_map(entry))
        for attr, wire_key in (
            ('message', 'Message'),
            ('page_number', 'PageNumber'),
            ('page_size', 'PageSize'),
            ('request_id', 'RequestId'),
            ('total_count', 'TotalCount'),
        ):
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListPersonTraceDetailsResponse(TeaModel):
    """Full API response for ListPersonTraceDetails: HTTP headers, status code, and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListPersonTraceDetailsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is required once the call has completed.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.status_code is not None:
            serialized['statusCode'] = self.status_code
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = ListPersonTraceDetailsResponseBody().from_map(source['body'])
        return self
class ListPersonVisitCountRequest(TeaModel):
    """Request model for the ListPersonVisitCount API."""

    def __init__(
        self,
        aggregate_type: str = None,
        corp_id: str = None,
        count_type: str = None,
        end_time: str = None,
        max_val: int = None,
        min_val: int = None,
        page_number: int = None,
        page_size: int = None,
        start_time: str = None,
        tag_code: str = None,
        time_aggregate_type: str = None,
    ):
        self.aggregate_type = aggregate_type
        self.corp_id = corp_id
        self.count_type = count_type
        self.end_time = end_time
        self.max_val = max_val
        self.min_val = min_val
        self.page_number = page_number
        self.page_size = page_size
        self.start_time = start_time
        self.tag_code = tag_code
        self.time_aggregate_type = time_aggregate_type

    # Attribute <-> wire-key pairs, in serialization order.
    _PAIRS = (
        ('aggregate_type', 'AggregateType'),
        ('corp_id', 'CorpId'),
        ('count_type', 'CountType'),
        ('end_time', 'EndTime'),
        ('max_val', 'MaxVal'),
        ('min_val', 'MinVal'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('start_time', 'StartTime'),
        ('tag_code', 'TagCode'),
        ('time_aggregate_type', 'TimeAggregateType'),
    )

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        for attr, wire_key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        for attr, wire_key in self._PAIRS:
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListPersonVisitCountResponseBodyData(TeaModel):
    """One visit-count record returned by ListPersonVisitCount."""

    def __init__(
        self,
        corp_id: str = None,
        day_id: str = None,
        device_id: str = None,
        group_id: str = None,
        hour_id: str = None,
        person_id: str = None,
        tag_code: str = None,
        tag_metrics: str = None,
    ):
        self.corp_id = corp_id
        self.day_id = day_id
        self.device_id = device_id
        self.group_id = group_id
        self.hour_id = hour_id
        self.person_id = person_id
        self.tag_code = tag_code
        self.tag_metrics = tag_metrics

    # Attribute <-> wire-key pairs, in serialization order.
    _PAIRS = (
        ('corp_id', 'CorpId'),
        ('day_id', 'DayId'),
        ('device_id', 'DeviceId'),
        ('group_id', 'GroupId'),
        ('hour_id', 'HourId'),
        ('person_id', 'PersonId'),
        ('tag_code', 'TagCode'),
        ('tag_metrics', 'TagMetrics'),
    )

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        for attr, wire_key in self._PAIRS:
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        for attr, wire_key in self._PAIRS:
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListPersonVisitCountResponseBody(TeaModel):
    """Response body for ListPersonVisitCount: result rows plus paging/status metadata."""

    def __init__(
        self,
        code: str = None,
        data: List[ListPersonVisitCountResponseBodyData] = None,
        message: str = None,
        page_no: str = None,
        page_size: str = None,
        request_id: str = None,
        success: str = None,
        total_count: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.page_no = page_no
        self.page_size = page_size
        self.request_id = request_id
        self.success = success
        self.total_count = total_count

    def validate(self):
        if self.data:
            for item in self.data:
                if item:
                    item.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        serialized = {}
        # 'Code' precedes 'Data' in the wire order; 'Data' is always emitted.
        if self.code is not None:
            serialized['Code'] = self.code
        serialized['Data'] = []
        if self.data is not None:
            for item in self.data:
                serialized['Data'].append(item.to_map() if item else None)
        for attr, wire_key in (
            ('message', 'Message'),
            ('page_no', 'PageNo'),
            ('page_size', 'PageSize'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
            ('total_count', 'TotalCount'),
        ):
            value = getattr(self, attr)
            if value is not None:
                serialized[wire_key] = value
        return serialized

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('Code') is not None:
            self.code = source.get('Code')
        # Rebuild the row list from scratch on every call.
        self.data = []
        if source.get('Data') is not None:
            for entry in source.get('Data'):
                self.data.append(ListPersonVisitCountResponseBodyData().from_map(entry))
        for attr, wire_key in (
            ('message', 'Message'),
            ('page_no', 'PageNo'),
            ('page_size', 'PageSize'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
            ('total_count', 'TotalCount'),
        ):
            if source.get(wire_key) is not None:
                setattr(self, attr, source.get(wire_key))
        return self
class ListPersonVisitCountResponse(TeaModel):
    """Full ListPersonVisitCount API response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListPersonVisitCountResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('headers', self.headers),
                           ('statusCode', self.status_code)):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListPersonVisitCountResponseBody().from_map(m['body'])
        return self
class ListPersonsRequest(TeaModel):
    """Request parameters for ListPersons: algorithm filter, corp id, time window and paging."""

    def __init__(
        self,
        algorithm_type: str = None,
        corp_id: str = None,
        end_time: str = None,
        page_no: str = None,
        page_size: str = None,
        start_time: str = None,
    ):
        self.algorithm_type = algorithm_type
        self.corp_id = corp_id
        self.end_time = end_time
        self.page_no = page_no
        self.page_size = page_size
        self.start_time = start_time

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (
            ('algorithm_type', 'AlgorithmType'),
            ('corp_id', 'CorpId'),
            ('end_time', 'EndTime'),
            ('page_no', 'PageNo'),
            ('page_size', 'PageSize'),
            ('start_time', 'StartTime'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        for key, attr in (
            ('AlgorithmType', 'algorithm_type'),
            ('CorpId', 'corp_id'),
            ('EndTime', 'end_time'),
            ('PageNo', 'page_no'),
            ('PageSize', 'page_size'),
            ('StartTime', 'start_time'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListPersonsResponseBodyDataRecordsTagList(TeaModel):
    """A single tag attached to a person record: code, name, value and value id."""

    def __init__(
        self,
        tag_code: str = None,
        tag_name: str = None,
        tag_value: str = None,
        tag_value_id: str = None,
    ):
        self.tag_code = tag_code
        self.tag_name = tag_name
        self.tag_value = tag_value
        self.tag_value_id = tag_value_id

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (
            ('tag_code', 'TagCode'),
            ('tag_name', 'TagName'),
            ('tag_value', 'TagValue'),
            ('tag_value_id', 'TagValueId'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        for key, attr in (
            ('TagCode', 'tag_code'),
            ('TagName', 'tag_name'),
            ('TagValue', 'tag_value'),
            ('TagValueId', 'tag_value_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListPersonsResponseBodyDataRecords(TeaModel):
    """One person record: first appearance time, id, picture URL and attached tags."""

    def __init__(
        self,
        first_appear_time: str = None,
        person_id: str = None,
        pic_url: str = None,
        tag_list: List[ListPersonsResponseBodyDataRecordsTagList] = None,
    ):
        self.first_appear_time = first_appear_time
        self.person_id = person_id
        self.pic_url = pic_url
        self.tag_list = tag_list

    def validate(self):
        """Recursively validate each tag in ``tag_list``."""
        for tag in self.tag_list or []:
            if tag:
                tag.validate()

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (
            ('first_appear_time', 'FirstAppearTime'),
            ('person_id', 'PersonId'),
            ('pic_url', 'PicUrl'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        # 'TagList' is always emitted, even when there are no tags.
        result['TagList'] = []
        if self.tag_list is not None:
            result['TagList'] = [tag.to_map() if tag else None for tag in self.tag_list]
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        for key, attr in (
            ('FirstAppearTime', 'first_appear_time'),
            ('PersonId', 'person_id'),
            ('PicUrl', 'pic_url'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        # 'tag_list' is always reset, even when the input has no 'TagList' key.
        self.tag_list = []
        if m.get('TagList') is not None:
            for item in m.get('TagList'):
                self.tag_list.append(ListPersonsResponseBodyDataRecordsTagList().from_map(item))
        return self
class ListPersonsResponseBodyData(TeaModel):
    """Paged payload of ListPersons: paging info plus the person records."""

    def __init__(
        self,
        page_no: str = None,
        page_size: str = None,
        records: List[ListPersonsResponseBodyDataRecords] = None,
        total_count: str = None,
        total_page: str = None,
    ):
        self.page_no = page_no
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        """Recursively validate each record in ``records``."""
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('page_no', 'PageNo'), ('page_size', 'PageSize')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        # 'Records' is always emitted, even when there are no records.
        result['Records'] = []
        if self.records is not None:
            result['Records'] = [r.to_map() if r else None for r in self.records]
        for attr, key in (('total_count', 'TotalCount'), ('total_page', 'TotalPage')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        for key, attr in (('PageNo', 'page_no'), ('PageSize', 'page_size')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        # 'records' is always reset, even when the input has no 'Records' key.
        self.records = []
        if m.get('Records') is not None:
            for item in m.get('Records'):
                self.records.append(ListPersonsResponseBodyDataRecords().from_map(item))
        for key, attr in (('TotalCount', 'total_count'), ('TotalPage', 'total_page')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListPersonsResponseBody(TeaModel):
    """Response body of ListPersons: status fields plus the nested paged data."""

    def __init__(
        self,
        code: str = None,
        data: ListPersonsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Validate the nested ``data`` model when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = ListPersonsResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ListPersonsResponse(TeaModel):
    """Full ListPersons API response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListPersonsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('headers', self.headers),
                           ('statusCode', self.status_code)):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListPersonsResponseBody().from_map(m['body'])
        return self
class ListUserGroupsRequest(TeaModel):
    """Request parameters for ListUserGroups: corp id and ISV sub-account id."""

    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
    ):
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('corp_id', 'CorpId'), ('isv_sub_id', 'IsvSubId')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        for key, attr in (('CorpId', 'corp_id'), ('IsvSubId', 'isv_sub_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListUserGroupsResponseBodyData(TeaModel):
    """One user-group record: identity, hierarchy, membership count and timestamps."""

    def __init__(
        self,
        create_time: str = None,
        creator: str = None,
        isv_sub_id: str = None,
        parent_user_group_id: int = None,
        update_time: str = None,
        user_count: int = None,
        user_group_id: int = None,
        user_group_name: str = None,
    ):
        self.create_time = create_time
        self.creator = creator
        self.isv_sub_id = isv_sub_id
        self.parent_user_group_id = parent_user_group_id
        self.update_time = update_time
        self.user_count = user_count
        self.user_group_id = user_group_id
        self.user_group_name = user_group_name

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (
            ('create_time', 'CreateTime'),
            ('creator', 'Creator'),
            ('isv_sub_id', 'IsvSubId'),
            ('parent_user_group_id', 'ParentUserGroupId'),
            ('update_time', 'UpdateTime'),
            ('user_count', 'UserCount'),
            ('user_group_id', 'UserGroupId'),
            ('user_group_name', 'UserGroupName'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        for key, attr in (
            ('CreateTime', 'create_time'),
            ('Creator', 'creator'),
            ('IsvSubId', 'isv_sub_id'),
            ('ParentUserGroupId', 'parent_user_group_id'),
            ('UpdateTime', 'update_time'),
            ('UserCount', 'user_count'),
            ('UserGroupId', 'user_group_id'),
            ('UserGroupName', 'user_group_name'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListUserGroupsResponseBody(TeaModel):
    """Response body of ListUserGroups: status fields plus the list of group records."""

    def __init__(
        self,
        code: str = None,
        data: List[ListUserGroupsResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Recursively validate each record in ``data``."""
        for record in self.data or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        # 'Data' is always emitted, even when there are no records.
        result['Data'] = []
        if self.data is not None:
            result['Data'] = [record.to_map() if record else None for record in self.data]
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        # 'data' is always reset, even when the input has no 'Data' key.
        self.data = []
        if m.get('Data') is not None:
            for item in m.get('Data'):
                self.data.append(ListUserGroupsResponseBodyData().from_map(item))
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ListUserGroupsResponse(TeaModel):
    """Full ListUserGroups API response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListUserGroupsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('headers', self.headers),
                           ('statusCode', self.status_code)):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListUserGroupsResponseBody().from_map(m['body'])
        return self
class ListUsersRequest(TeaModel):
    """Request parameters for ListUsers: user attributes, matching options and paging."""

    def __init__(
        self,
        address: str = None,
        age: int = None,
        attachment: str = None,
        biz_id: str = None,
        corp_id: str = None,
        face_image_url: str = None,
        gender: int = None,
        id_number: str = None,
        isv_sub_id: str = None,
        matching_rate_threshold: str = None,
        page_number: int = None,
        page_size: int = None,
        person_list: Dict[str, Any] = None,
        phone_no: str = None,
        plate_no: str = None,
        user_group_id: int = None,
        user_list: Dict[str, Any] = None,
        user_name: str = None,
    ):
        self.address = address
        self.age = age
        self.attachment = attachment
        self.biz_id = biz_id
        self.corp_id = corp_id
        self.face_image_url = face_image_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.matching_rate_threshold = matching_rate_threshold
        self.page_number = page_number
        self.page_size = page_size
        self.person_list = person_list
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.user_group_id = user_group_id
        self.user_list = user_list
        self.user_name = user_name

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (
            ('address', 'Address'),
            ('age', 'Age'),
            ('attachment', 'Attachment'),
            ('biz_id', 'BizId'),
            ('corp_id', 'CorpId'),
            ('face_image_url', 'FaceImageUrl'),
            ('gender', 'Gender'),
            ('id_number', 'IdNumber'),
            ('isv_sub_id', 'IsvSubId'),
            ('matching_rate_threshold', 'MatchingRateThreshold'),
            ('page_number', 'PageNumber'),
            ('page_size', 'PageSize'),
            ('person_list', 'PersonList'),
            ('phone_no', 'PhoneNo'),
            ('plate_no', 'PlateNo'),
            ('user_group_id', 'UserGroupId'),
            ('user_list', 'UserList'),
            ('user_name', 'UserName'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        for key, attr in (
            ('Address', 'address'),
            ('Age', 'age'),
            ('Attachment', 'attachment'),
            ('BizId', 'biz_id'),
            ('CorpId', 'corp_id'),
            ('FaceImageUrl', 'face_image_url'),
            ('Gender', 'gender'),
            ('IdNumber', 'id_number'),
            ('IsvSubId', 'isv_sub_id'),
            ('MatchingRateThreshold', 'matching_rate_threshold'),
            ('PageNumber', 'page_number'),
            ('PageSize', 'page_size'),
            ('PersonList', 'person_list'),
            ('PhoneNo', 'phone_no'),
            ('PlateNo', 'plate_no'),
            ('UserGroupId', 'user_group_id'),
            ('UserList', 'user_list'),
            ('UserName', 'user_name'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListUsersShrinkRequest(TeaModel):
    """Shrink variant of ListUsersRequest: PersonList/UserList are pre-serialized strings."""

    def __init__(
        self,
        address: str = None,
        age: int = None,
        attachment: str = None,
        biz_id: str = None,
        corp_id: str = None,
        face_image_url: str = None,
        gender: int = None,
        id_number: str = None,
        isv_sub_id: str = None,
        matching_rate_threshold: str = None,
        page_number: int = None,
        page_size: int = None,
        person_list_shrink: str = None,
        phone_no: str = None,
        plate_no: str = None,
        user_group_id: int = None,
        user_list_shrink: str = None,
        user_name: str = None,
    ):
        self.address = address
        self.age = age
        self.attachment = attachment
        self.biz_id = biz_id
        self.corp_id = corp_id
        self.face_image_url = face_image_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.matching_rate_threshold = matching_rate_threshold
        self.page_number = page_number
        self.page_size = page_size
        self.person_list_shrink = person_list_shrink
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.user_group_id = user_group_id
        self.user_list_shrink = user_list_shrink
        self.user_name = user_name

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # Note: the *_shrink attributes map onto the same wire keys
        # ('PersonList'/'UserList') as the non-shrink request.
        for attr, key in (
            ('address', 'Address'),
            ('age', 'Age'),
            ('attachment', 'Attachment'),
            ('biz_id', 'BizId'),
            ('corp_id', 'CorpId'),
            ('face_image_url', 'FaceImageUrl'),
            ('gender', 'Gender'),
            ('id_number', 'IdNumber'),
            ('isv_sub_id', 'IsvSubId'),
            ('matching_rate_threshold', 'MatchingRateThreshold'),
            ('page_number', 'PageNumber'),
            ('page_size', 'PageSize'),
            ('person_list_shrink', 'PersonList'),
            ('phone_no', 'PhoneNo'),
            ('plate_no', 'PlateNo'),
            ('user_group_id', 'UserGroupId'),
            ('user_list_shrink', 'UserList'),
            ('user_name', 'UserName'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        for key, attr in (
            ('Address', 'address'),
            ('Age', 'age'),
            ('Attachment', 'attachment'),
            ('BizId', 'biz_id'),
            ('CorpId', 'corp_id'),
            ('FaceImageUrl', 'face_image_url'),
            ('Gender', 'gender'),
            ('IdNumber', 'id_number'),
            ('IsvSubId', 'isv_sub_id'),
            ('MatchingRateThreshold', 'matching_rate_threshold'),
            ('PageNumber', 'page_number'),
            ('PageSize', 'page_size'),
            ('PersonList', 'person_list_shrink'),
            ('PhoneNo', 'phone_no'),
            ('PlateNo', 'plate_no'),
            ('UserGroupId', 'user_group_id'),
            ('UserList', 'user_list_shrink'),
            ('UserName', 'user_name'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListUsersResponseBodyDataRecords(TeaModel):
    """One user record returned by ListUsers: identity, profile and matching rate."""

    def __init__(
        self,
        age: str = None,
        attachment: str = None,
        biz_id: str = None,
        face_image_url: str = None,
        gender: str = None,
        id_number: str = None,
        isv_sub_id: str = None,
        matching_rate: str = None,
        person_id: str = None,
        user_group_id: int = None,
        user_id: int = None,
        user_name: str = None,
    ):
        self.age = age
        self.attachment = attachment
        self.biz_id = biz_id
        self.face_image_url = face_image_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.matching_rate = matching_rate
        self.person_id = person_id
        self.user_group_id = user_group_id
        self.user_id = user_id
        self.user_name = user_name

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (
            ('age', 'Age'),
            ('attachment', 'Attachment'),
            ('biz_id', 'BizId'),
            ('face_image_url', 'FaceImageUrl'),
            ('gender', 'Gender'),
            ('id_number', 'IdNumber'),
            ('isv_sub_id', 'IsvSubId'),
            ('matching_rate', 'MatchingRate'),
            ('person_id', 'PersonId'),
            ('user_group_id', 'UserGroupId'),
            ('user_id', 'UserId'),
            ('user_name', 'UserName'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        for key, attr in (
            ('Age', 'age'),
            ('Attachment', 'attachment'),
            ('BizId', 'biz_id'),
            ('FaceImageUrl', 'face_image_url'),
            ('Gender', 'gender'),
            ('IdNumber', 'id_number'),
            ('IsvSubId', 'isv_sub_id'),
            ('MatchingRate', 'matching_rate'),
            ('PersonId', 'person_id'),
            ('UserGroupId', 'user_group_id'),
            ('UserId', 'user_id'),
            ('UserName', 'user_name'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListUsersResponseBodyData(TeaModel):
    """Paged payload of ListUsers: paging info, user records and totals."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[ListUsersResponseBodyDataRecords] = None,
        success: int = None,
        total: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.success = success
        self.total = total

    def validate(self):
        """Recursively validate each record in ``records``."""
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('page_number', 'PageNumber'), ('page_size', 'PageSize')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        # 'Records' is always emitted, even when there are no records.
        result['Records'] = []
        if self.records is not None:
            result['Records'] = [r.to_map() if r else None for r in self.records]
        for attr, key in (('success', 'Success'), ('total', 'Total')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        for key, attr in (('PageNumber', 'page_number'), ('PageSize', 'page_size')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        # 'records' is always reset, even when the input has no 'Records' key.
        self.records = []
        if m.get('Records') is not None:
            for item in m.get('Records'):
                self.records.append(ListUsersResponseBodyDataRecords().from_map(item))
        for key, attr in (('Success', 'success'), ('Total', 'total')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ListUsersResponseBody(TeaModel):
    """Response body of ListUsers: status fields plus the nested paged data."""

    def __init__(
        self,
        code: str = None,
        data: ListUsersResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Validate the nested ``data`` model when present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = ListUsersResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class ListUsersResponse(TeaModel):
    """Full ListUsers API response: HTTP headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ListUsersResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three parts are required; the body is validated recursively."""
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('headers', self.headers),
                           ('statusCode', self.status_code)):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a dict and return self."""
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ListUsersResponseBody().from_map(m['body'])
        return self
class ModifyDeviceRequest(TeaModel):
    """Request parameters for ModifyDevice: identity, placement, stream/encode and OSD settings."""

    def __init__(
        self,
        audio_enable: str = None,
        device_address: str = None,
        device_direction: str = None,
        device_id: str = None,
        device_ip: str = None,
        device_model: str = None,
        device_name: str = None,
        device_rate: str = None,
        device_resolution: str = None,
        device_site: str = None,
        device_sub_type: str = None,
        encode_format: str = None,
        frame_rate: str = None,
        gov_length: str = None,
        latitude: str = None,
        longitude: str = None,
        osdtime_enable: str = None,
        osdtime_type: str = None,
        osdtime_x: str = None,
        osdtime_y: str = None,
        password: str = None,
        vendor: str = None,
    ):
        self.audio_enable = audio_enable
        # Device installation address.
        self.device_address = device_address
        self.device_direction = device_direction
        # Device GB (national standard) code.
        self.device_id = device_id
        self.device_ip = device_ip
        self.device_model = device_model
        # Device name.
        self.device_name = device_name
        self.device_rate = device_rate
        self.device_resolution = device_resolution
        self.device_site = device_site
        self.device_sub_type = device_sub_type
        self.encode_format = encode_format
        self.frame_rate = frame_rate
        self.gov_length = gov_length
        self.latitude = latitude
        self.longitude = longitude
        self.osdtime_enable = osdtime_enable
        self.osdtime_type = osdtime_type
        self.osdtime_x = osdtime_x
        self.osdtime_y = osdtime_y
        self.password = password
        self.vendor = vendor

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a CamelCase-keyed dict; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (
            ('audio_enable', 'AudioEnable'),
            ('device_address', 'DeviceAddress'),
            ('device_direction', 'DeviceDirection'),
            ('device_id', 'DeviceId'),
            ('device_ip', 'DeviceIp'),
            ('device_model', 'DeviceModel'),
            ('device_name', 'DeviceName'),
            ('device_rate', 'DeviceRate'),
            ('device_resolution', 'DeviceResolution'),
            ('device_site', 'DeviceSite'),
            ('device_sub_type', 'DeviceSubType'),
            ('encode_format', 'EncodeFormat'),
            ('frame_rate', 'FrameRate'),
            ('gov_length', 'GovLength'),
            ('latitude', 'Latitude'),
            ('longitude', 'Longitude'),
            ('osdtime_enable', 'OSDTimeEnable'),
            ('osdtime_type', 'OSDTimeType'),
            ('osdtime_x', 'OSDTimeX'),
            ('osdtime_y', 'OSDTimeY'),
            ('password', 'Password'),
            ('vendor', 'Vendor'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a CamelCase-keyed dict and return self."""
        m = m or dict()
        for key, attr in (
            ('AudioEnable', 'audio_enable'),
            ('DeviceAddress', 'device_address'),
            ('DeviceDirection', 'device_direction'),
            ('DeviceId', 'device_id'),
            ('DeviceIp', 'device_ip'),
            ('DeviceModel', 'device_model'),
            ('DeviceName', 'device_name'),
            ('DeviceRate', 'device_rate'),
            ('DeviceResolution', 'device_resolution'),
            ('DeviceSite', 'device_site'),
            ('DeviceSubType', 'device_sub_type'),
            ('EncodeFormat', 'encode_format'),
            ('FrameRate', 'frame_rate'),
            ('GovLength', 'gov_length'),
            ('Latitude', 'latitude'),
            ('Longitude', 'longitude'),
            ('OSDTimeEnable', 'osdtime_enable'),
            ('OSDTimeType', 'osdtime_type'),
            ('OSDTimeX', 'osdtime_x'),
            ('OSDTimeY', 'osdtime_y'),
            ('Password', 'password'),
            ('Vendor', 'vendor'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ModifyDeviceResponseBody(TeaModel):
    """Body of the ModifyDevice API response: status code, message, request id."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        # Error code returned by the service.
        self.code = code
        # Human-readable description of the error code.
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('Code', self.code),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ModifyDeviceResponse(TeaModel):
    """Full ModifyDevice API response: HTTP headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ModifyDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, nesting the body via its own to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, parsing the nested body model."""
        source = m or dict()
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = ModifyDeviceResponseBody().from_map(m['body'])
        return self
class PeekNvrRequest(TeaModel):
    """Request model for the PeekNvr API: identifies the target NVR device."""

    def __init__(
        self,
        corp_id: str = None,
        device_id: str = None,
    ):
        self.corp_id = corp_id
        self.device_id = device_id

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('CorpId', self.corp_id),
            ('DeviceId', self.device_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('CorpId', 'corp_id'),
            ('DeviceId', 'device_id'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class PeekNvrResponseBodyDataSubDeviceList(TeaModel):
    """One sub-device entry in the PeekNvr response's device list."""

    def __init__(
        self,
        associated_platform: str = None,
        sub_device_id: str = None,
        sub_device_name: str = None,
        sub_device_num: str = None,
        sub_device_status: str = None,
    ):
        self.associated_platform = associated_platform
        self.sub_device_id = sub_device_id
        self.sub_device_name = sub_device_name
        self.sub_device_num = sub_device_num
        self.sub_device_status = sub_device_status

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('AssociatedPlatform', self.associated_platform),
            ('SubDeviceId', self.sub_device_id),
            ('SubDeviceName', self.sub_device_name),
            ('SubDeviceNum', self.sub_device_num),
            ('SubDeviceStatus', self.sub_device_status),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('AssociatedPlatform', 'associated_platform'),
            ('SubDeviceId', 'sub_device_id'),
            ('SubDeviceName', 'sub_device_name'),
            ('SubDeviceNum', 'sub_device_num'),
            ('SubDeviceStatus', 'sub_device_status'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class PeekNvrResponseBodyData(TeaModel):
    """Data section of the PeekNvr response: idle count plus sub-device list."""

    def __init__(
        self,
        idle_count: int = None,
        sub_device_list: List[PeekNvrResponseBodyDataSubDeviceList] = None,
    ):
        self.idle_count = idle_count
        self.sub_device_list = sub_device_list

    def validate(self):
        """Recursively validate each sub-device entry, if any."""
        if self.sub_device_list:
            for entry in self.sub_device_list:
                if entry:
                    entry.validate()

    def to_map(self):
        """Serialize to a dict; each list item is serialized via its to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.idle_count is not None:
            out['IdleCount'] = self.idle_count
        # NOTE: 'SubDeviceList' is always emitted, even when the list is unset.
        out['SubDeviceList'] = []
        if self.sub_device_list is not None:
            for entry in self.sub_device_list:
                out['SubDeviceList'].append(entry.to_map() if entry else None)
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, rebuilding the sub-device models."""
        source = m or dict()
        if source.get('IdleCount') is not None:
            self.idle_count = source.get('IdleCount')
        # The list attribute is always reset before being rebuilt.
        self.sub_device_list = []
        if source.get('SubDeviceList') is not None:
            self.sub_device_list = [
                PeekNvrResponseBodyDataSubDeviceList().from_map(entry)
                for entry in source.get('SubDeviceList')
            ]
        return self
class PeekNvrResponseBody(TeaModel):
    """Body of the PeekNvr API response: status fields plus a data payload."""

    def __init__(
        self,
        code: str = None,
        data: PeekNvrResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        """Recursively validate the nested data model, if set."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a dict, nesting the data payload via its to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, parsing the nested data model."""
        source = m or dict()
        if source.get('Code') is not None:
            self.code = source.get('Code')
        if source.get('Data') is not None:
            self.data = PeekNvrResponseBodyData().from_map(m['Data'])
        if source.get('Message') is not None:
            self.message = source.get('Message')
        if source.get('RequestId') is not None:
            self.request_id = source.get('RequestId')
        return self
class PeekNvrResponse(TeaModel):
    """Full PeekNvr API response: HTTP headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: PeekNvrResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, nesting the body via its own to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, parsing the nested body model."""
        source = m or dict()
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = PeekNvrResponseBody().from_map(m['body'])
        return self
class RaiseDevicesStorageRequest(TeaModel):
    """Request model for RaiseDevicesStorage: a single JSON payload string."""

    def __init__(
        self,
        json: str = None,
    ):
        self.json = json

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting the field when unset."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.json is not None:
            out['Json'] = self.json
        return out

    def from_map(self, m: dict = None):
        """Populate the field from a dict and return self."""
        source = m or dict()
        if source.get('Json') is not None:
            self.json = source.get('Json')
        return self
class RaiseDevicesStorageResponseBody(TeaModel):
    """Body of the RaiseDevicesStorage response: status fields and a boolean result."""

    def __init__(
        self,
        code: str = None,
        data: bool = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id
        self.success = success

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('Code', self.code),
            ('Data', self.data),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('Code', 'code'),
            ('Data', 'data'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
            ('Success', 'success'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class RaiseDevicesStorageResponse(TeaModel):
    """Full RaiseDevicesStorage API response: headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: RaiseDevicesStorageResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, nesting the body via its own to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, parsing the nested body model."""
        source = m or dict()
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = RaiseDevicesStorageResponseBody().from_map(m['body'])
        return self
class RecognizeFaceQualityRequest(TeaModel):
    """Request model for RecognizeFaceQuality: picture supplied by content or URL."""

    def __init__(
        self,
        corp_id: str = None,
        pic_content: str = None,
        pic_format: str = None,
        pic_url: str = None,
    ):
        self.corp_id = corp_id
        self.pic_content = pic_content
        self.pic_format = pic_format
        self.pic_url = pic_url

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('CorpId', self.corp_id),
            ('PicContent', self.pic_content),
            ('PicFormat', self.pic_format),
            ('PicUrl', self.pic_url),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('CorpId', 'corp_id'),
            ('PicContent', 'pic_content'),
            ('PicFormat', 'pic_format'),
            ('PicUrl', 'pic_url'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class RecognizeFaceQualityResponseBodyDataAttributes(TeaModel):
    """Face attributes in the RecognizeFaceQuality response: scores and bounding box."""

    def __init__(
        self,
        face_quality: str = None,
        face_score: str = None,
        face_style: str = None,
        left_top_x: int = None,
        left_top_y: int = None,
        right_bottom_x: int = None,
        right_bottom_y: int = None,
        target_image_storage_path: str = None,
    ):
        self.face_quality = face_quality
        self.face_score = face_score
        self.face_style = face_style
        self.left_top_x = left_top_x
        self.left_top_y = left_top_y
        self.right_bottom_x = right_bottom_x
        self.right_bottom_y = right_bottom_y
        self.target_image_storage_path = target_image_storage_path

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('FaceQuality', self.face_quality),
            ('FaceScore', self.face_score),
            ('FaceStyle', self.face_style),
            ('LeftTopX', self.left_top_x),
            ('LeftTopY', self.left_top_y),
            ('RightBottomX', self.right_bottom_x),
            ('RightBottomY', self.right_bottom_y),
            ('TargetImageStoragePath', self.target_image_storage_path),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('FaceQuality', 'face_quality'),
            ('FaceScore', 'face_score'),
            ('FaceStyle', 'face_style'),
            ('LeftTopX', 'left_top_x'),
            ('LeftTopY', 'left_top_y'),
            ('RightBottomX', 'right_bottom_x'),
            ('RightBottomY', 'right_bottom_y'),
            ('TargetImageStoragePath', 'target_image_storage_path'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class RecognizeFaceQualityResponseBodyData(TeaModel):
    """Data section of the RecognizeFaceQuality response."""

    def __init__(
        self,
        attributes: RecognizeFaceQualityResponseBodyDataAttributes = None,
        description: str = None,
        quality_score: str = None,
    ):
        self.attributes = attributes
        self.description = description
        self.quality_score = quality_score

    def validate(self):
        """Recursively validate the nested attributes model, if set."""
        if self.attributes:
            self.attributes.validate()

    def to_map(self):
        """Serialize to a dict, nesting the attributes via their to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.attributes is not None:
            out['Attributes'] = self.attributes.to_map()
        if self.description is not None:
            out['Description'] = self.description
        if self.quality_score is not None:
            out['QualityScore'] = self.quality_score
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, parsing the nested attributes model."""
        source = m or dict()
        if source.get('Attributes') is not None:
            self.attributes = RecognizeFaceQualityResponseBodyDataAttributes().from_map(m['Attributes'])
        if source.get('Description') is not None:
            self.description = source.get('Description')
        if source.get('QualityScore') is not None:
            self.quality_score = source.get('QualityScore')
        return self
class RecognizeFaceQualityResponseBody(TeaModel):
    """Body of the RecognizeFaceQuality response: status fields plus a data payload."""

    def __init__(
        self,
        code: str = None,
        data: RecognizeFaceQualityResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Recursively validate the nested data model, if set."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a dict, nesting the data payload via its to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, parsing the nested data model."""
        source = m or dict()
        if source.get('Code') is not None:
            self.code = source.get('Code')
        if source.get('Data') is not None:
            self.data = RecognizeFaceQualityResponseBodyData().from_map(m['Data'])
        if source.get('Message') is not None:
            self.message = source.get('Message')
        if source.get('RequestId') is not None:
            self.request_id = source.get('RequestId')
        return self
class RecognizeFaceQualityResponse(TeaModel):
    """Full RecognizeFaceQuality API response: headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: RecognizeFaceQualityResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, nesting the body via its own to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, parsing the nested body model."""
        source = m or dict()
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = RecognizeFaceQualityResponseBody().from_map(m['body'])
        return self
class RecognizeImageRequest(TeaModel):
    """Request model for RecognizeImage: picture supplied by content or URL."""

    def __init__(
        self,
        corp_id: str = None,
        pic_content: str = None,
        pic_format: str = None,
        pic_url: str = None,
    ):
        self.corp_id = corp_id
        self.pic_content = pic_content
        self.pic_format = pic_format
        self.pic_url = pic_url

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('CorpId', self.corp_id),
            ('PicContent', self.pic_content),
            ('PicFormat', self.pic_format),
            ('PicUrl', self.pic_url),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('CorpId', 'corp_id'),
            ('PicContent', 'pic_content'),
            ('PicFormat', 'pic_format'),
            ('PicUrl', 'pic_url'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class RecognizeImageResponseBodyDataBodyList(TeaModel):
    """One detected body entry in the RecognizeImage response."""

    def __init__(
        self,
        feature: str = None,
        file_name: str = None,
        image_base_six_four: str = None,
        left_top_x: str = None,
        left_top_y: str = None,
        local_feature: str = None,
        respirator_color: str = None,
        right_bottom_x: str = None,
        right_bottom_y: str = None,
    ):
        self.feature = feature
        self.file_name = file_name
        self.image_base_six_four = image_base_six_four
        self.left_top_x = left_top_x
        self.left_top_y = left_top_y
        self.local_feature = local_feature
        self.respirator_color = respirator_color
        self.right_bottom_x = right_bottom_x
        self.right_bottom_y = right_bottom_y

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('Feature', self.feature),
            ('FileName', self.file_name),
            ('ImageBaseSixFour', self.image_base_six_four),
            ('LeftTopX', self.left_top_x),
            ('LeftTopY', self.left_top_y),
            ('LocalFeature', self.local_feature),
            ('RespiratorColor', self.respirator_color),
            ('RightBottomX', self.right_bottom_x),
            ('RightBottomY', self.right_bottom_y),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('Feature', 'feature'),
            ('FileName', 'file_name'),
            ('ImageBaseSixFour', 'image_base_six_four'),
            ('LeftTopX', 'left_top_x'),
            ('LeftTopY', 'left_top_y'),
            ('LocalFeature', 'local_feature'),
            ('RespiratorColor', 'respirator_color'),
            ('RightBottomX', 'right_bottom_x'),
            ('RightBottomY', 'right_bottom_y'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class RecognizeImageResponseBodyDataFaceList(TeaModel):
    """One detected face entry in the RecognizeImage response."""

    def __init__(
        self,
        feature: str = None,
        file_name: str = None,
        image_base_six_four: str = None,
        key_point_quality: float = None,
        left_top_x: str = None,
        left_top_y: str = None,
        local_feature: str = None,
        quality: float = None,
        respirator_color: str = None,
        right_bottom_x: str = None,
        right_bottom_y: str = None,
    ):
        self.feature = feature
        self.file_name = file_name
        self.image_base_six_four = image_base_six_four
        self.key_point_quality = key_point_quality
        self.left_top_x = left_top_x
        self.left_top_y = left_top_y
        self.local_feature = local_feature
        self.quality = quality
        self.respirator_color = respirator_color
        self.right_bottom_x = right_bottom_x
        self.right_bottom_y = right_bottom_y

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('Feature', self.feature),
            ('FileName', self.file_name),
            ('ImageBaseSixFour', self.image_base_six_four),
            ('KeyPointQuality', self.key_point_quality),
            ('LeftTopX', self.left_top_x),
            ('LeftTopY', self.left_top_y),
            ('LocalFeature', self.local_feature),
            ('Quality', self.quality),
            ('RespiratorColor', self.respirator_color),
            ('RightBottomX', self.right_bottom_x),
            ('RightBottomY', self.right_bottom_y),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('Feature', 'feature'),
            ('FileName', 'file_name'),
            ('ImageBaseSixFour', 'image_base_six_four'),
            ('KeyPointQuality', 'key_point_quality'),
            ('LeftTopX', 'left_top_x'),
            ('LeftTopY', 'left_top_y'),
            ('LocalFeature', 'local_feature'),
            ('Quality', 'quality'),
            ('RespiratorColor', 'respirator_color'),
            ('RightBottomX', 'right_bottom_x'),
            ('RightBottomY', 'right_bottom_y'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class RecognizeImageResponseBodyData(TeaModel):
    """Data section of the RecognizeImage response: detected bodies and faces."""

    def __init__(
        self,
        body_list: List[RecognizeImageResponseBodyDataBodyList] = None,
        face_list: List[RecognizeImageResponseBodyDataFaceList] = None,
    ):
        self.body_list = body_list
        self.face_list = face_list

    def validate(self):
        """Recursively validate every body and face entry, if any."""
        if self.body_list:
            for entry in self.body_list:
                if entry:
                    entry.validate()
        if self.face_list:
            for entry in self.face_list:
                if entry:
                    entry.validate()

    def to_map(self):
        """Serialize to a dict; list items are serialized via their to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        # NOTE: both list keys are always emitted, even when the lists are unset.
        out['BodyList'] = []
        if self.body_list is not None:
            for entry in self.body_list:
                out['BodyList'].append(entry.to_map() if entry else None)
        out['FaceList'] = []
        if self.face_list is not None:
            for entry in self.face_list:
                out['FaceList'].append(entry.to_map() if entry else None)
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, rebuilding the nested list models."""
        source = m or dict()
        # Both list attributes are always reset before being rebuilt.
        self.body_list = []
        if source.get('BodyList') is not None:
            self.body_list = [
                RecognizeImageResponseBodyDataBodyList().from_map(entry)
                for entry in source.get('BodyList')
            ]
        self.face_list = []
        if source.get('FaceList') is not None:
            self.face_list = [
                RecognizeImageResponseBodyDataFaceList().from_map(entry)
                for entry in source.get('FaceList')
            ]
        return self
class RecognizeImageResponseBody(TeaModel):
    """Body of the RecognizeImage response: status fields plus a data payload."""

    def __init__(
        self,
        code: str = None,
        data: RecognizeImageResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Recursively validate the nested data model, if set."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a dict, nesting the data payload via its to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, parsing the nested data model."""
        source = m or dict()
        if source.get('Code') is not None:
            self.code = source.get('Code')
        if source.get('Data') is not None:
            self.data = RecognizeImageResponseBodyData().from_map(m['Data'])
        if source.get('Message') is not None:
            self.message = source.get('Message')
        if source.get('RequestId') is not None:
            self.request_id = source.get('RequestId')
        return self
class RecognizeImageResponse(TeaModel):
    """Full RecognizeImage API response: headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: RecognizeImageResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, nesting the body via its own to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, parsing the nested body model."""
        source = m or dict()
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = RecognizeImageResponseBody().from_map(m['body'])
        return self
class RegisterDeviceRequest(TeaModel):
    """Request model for RegisterDevice: device identity and registration info."""

    def __init__(
        self,
        device_id: str = None,
        device_sn: str = None,
        device_time_stamp: str = None,
        server_id: str = None,
    ):
        self.device_id = device_id
        self.device_sn = device_sn
        self.device_time_stamp = device_time_stamp
        self.server_id = server_id

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('DeviceId', self.device_id),
            ('DeviceSn', self.device_sn),
            ('DeviceTimeStamp', self.device_time_stamp),
            ('ServerId', self.server_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('DeviceId', 'device_id'),
            ('DeviceSn', 'device_sn'),
            ('DeviceTimeStamp', 'device_time_stamp'),
            ('ServerId', 'server_id'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class RegisterDeviceResponseBody(TeaModel):
    """Body of the RegisterDevice response: status fields and a retry interval."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        retry_interval: str = None,
    ):
        self.code = code
        self.message = message
        self.request_id = request_id
        self.retry_interval = retry_interval

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('Code', self.code),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('RetryInterval', self.retry_interval),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
            ('RetryInterval', 'retry_interval'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class RegisterDeviceResponse(TeaModel):
    """Full RegisterDevice API response: headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: RegisterDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, nesting the body via its own to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, parsing the nested body model."""
        source = m or dict()
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = RegisterDeviceResponseBody().from_map(m['body'])
        return self
class RemoveCameraForInstanceRequest(TeaModel):
    """Request model for RemoveCameraForInstance."""

    def __init__(
        self,
        camera_ids: List[str] = None,
        instance_id: str = None,
    ):
        # List of device (camera) IDs to remove.
        self.camera_ids = camera_ids
        # Identifier of the instance.
        self.instance_id = instance_id

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('CameraIds', self.camera_ids),
            ('InstanceId', self.instance_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('CameraIds', 'camera_ids'),
            ('InstanceId', 'instance_id'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class RemoveCameraForInstanceShrinkRequest(TeaModel):
    """Shrink variant of RemoveCameraForInstanceRequest (list flattened to a string).

    Note: the flattened field still maps to the 'CameraIds' wire key.
    """

    def __init__(
        self,
        camera_ids_shrink: str = None,
        instance_id: str = None,
    ):
        # Device (camera) ID list, serialized as a single string.
        self.camera_ids_shrink = camera_ids_shrink
        # Identifier of the instance.
        self.instance_id = instance_id

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('CameraIds', self.camera_ids_shrink),
            ('InstanceId', self.instance_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('CameraIds', 'camera_ids_shrink'),
            ('InstanceId', 'instance_id'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class RemoveCameraForInstanceResponseBody(TeaModel):
    """Body of the RemoveCameraForInstance response."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        # Result code.
        self.code = code
        # Result message.
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('Code', self.code),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class RemoveCameraForInstanceResponse(TeaModel):
    """Full RemoveCameraForInstance API response: headers, status code, parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: RemoveCameraForInstanceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three fields are required; the body is validated recursively."""
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a dict, nesting the body via its own to_map."""
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate fields from a dict, parsing the nested body model."""
        source = m or dict()
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('statusCode') is not None:
            self.status_code = source.get('statusCode')
        if source.get('body') is not None:
            self.body = RemoveCameraForInstanceResponseBody().from_map(m['body'])
        return self
class RemoveSearchItemsRequest(TeaModel):
    """Request model for RemoveSearchItems: item ids and the owning search table."""

    def __init__(
        self,
        search_item_ids: str = None,
        search_table_id: str = None,
    ):
        self.search_item_ids = search_item_ids
        self.search_table_id = search_table_id

    def validate(self):
        """No local validation rules for this model."""
        pass

    def to_map(self):
        """Serialize to a dict, omitting unset (None) fields."""
        base = super().to_map()
        if base is not None:
            return base
        pairs = (
            ('SearchItemIds', self.search_item_ids),
            ('SearchTableId', self.search_table_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        """Populate fields from a dict and return self."""
        source = m or dict()
        for key, attr in (
            ('SearchItemIds', 'search_item_ids'),
            ('SearchTableId', 'search_table_id'),
        ):
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class RemoveSearchItemsResponseBodyData(TeaModel):
    """Per-item outcome of a remove-search-items call."""

    def __init__(
        self,
        deleted_item_ids: str = None,
        failed_item_ids: str = None,
    ):
        # Ids that were deleted successfully.
        self.deleted_item_ids = deleted_item_ids
        # Ids that could not be deleted.
        self.failed_item_ids = failed_item_ids

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('DeletedItemIds', self.deleted_item_ids),
            ('FailedItemIds', self.failed_item_ids),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('deleted_item_ids', 'DeletedItemIds'),
            ('failed_item_ids', 'FailedItemIds'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class RemoveSearchItemsResponseBody(TeaModel):
    """Response payload: result code/message plus per-item deletion results."""

    def __init__(
        self,
        code: str = None,
        data: RemoveSearchItemsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.success = success

    def validate(self):
        """Recursively validate the nested data model, if present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a dict using wire-format keys; `data` recurses."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict, rebuilding `data`; return self."""
        m = m or {}
        if m.get('Data') is not None:
            self.data = RemoveSearchItemsResponseBodyData().from_map(m['Data'])
        for attr, key in (
            ('code', 'Code'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class RemoveSearchItemsResponse(TeaModel):
    """Full API response: transport headers, HTTP status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: RemoveSearchItemsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three response parts are required; the body is validated recursively."""
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body model is serialized recursively."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a dict, rebuilding the nested body model; return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = RemoveSearchItemsResponseBody().from_map(m['body'])
        return self
class RemoveWatchItemsRequest(TeaModel):
    """Parameters for removing watch items from a watch policy."""

    def __init__(
        self,
        watch_item_ids: str = None,
        watch_policy_id: str = None,
    ):
        # Ids of the watch items to remove — presumably a delimited string; TODO confirm format.
        self.watch_item_ids = watch_item_ids
        # Id of the watch policy the items belong to.
        self.watch_policy_id = watch_policy_id

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('WatchItemIds', self.watch_item_ids),
            ('WatchPolicyId', self.watch_policy_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('watch_item_ids', 'WatchItemIds'),
            ('watch_policy_id', 'WatchPolicyId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class RemoveWatchItemsResponseBodyData(TeaModel):
    """Per-item outcome of a remove-watch-items call."""

    def __init__(
        self,
        deleted_item_ids: str = None,
        failed_item_ids: str = None,
    ):
        # Ids that were deleted successfully.
        self.deleted_item_ids = deleted_item_ids
        # Ids that could not be deleted.
        self.failed_item_ids = failed_item_ids

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('DeletedItemIds', self.deleted_item_ids),
            ('FailedItemIds', self.failed_item_ids),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('deleted_item_ids', 'DeletedItemIds'),
            ('failed_item_ids', 'FailedItemIds'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class RemoveWatchItemsResponseBody(TeaModel):
    """Response payload: result code/message plus per-item deletion results."""

    def __init__(
        self,
        code: str = None,
        data: RemoveWatchItemsResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.success = success

    def validate(self):
        """Recursively validate the nested data model, if present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a dict using wire-format keys; `data` recurses."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict, rebuilding `data`; return self."""
        m = m or {}
        if m.get('Data') is not None:
            self.data = RemoveWatchItemsResponseBodyData().from_map(m['Data'])
        for attr, key in (
            ('code', 'Code'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
            ('success', 'Success'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class RemoveWatchItemsResponse(TeaModel):
    """Full API response: transport headers, HTTP status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: RemoveWatchItemsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three response parts are required; the body is validated recursively."""
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body model is serialized recursively."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a dict, rebuilding the nested body model; return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = RemoveWatchItemsResponseBody().from_map(m['body'])
        return self
class SaveVideoSummaryTaskVideoRequest(TeaModel):
    """Parameters for toggling video retention on a video-summary task."""

    def __init__(
        self,
        corp_id: str = None,
        save_video: bool = None,
        task_id: int = None,
    ):
        self.corp_id = corp_id
        # Whether the task's video should be kept.
        self.save_video = save_video
        self.task_id = task_id

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('CorpId', self.corp_id),
            ('SaveVideo', self.save_video),
            ('TaskId', self.task_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('corp_id', 'CorpId'),
            ('save_video', 'SaveVideo'),
            ('task_id', 'TaskId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SaveVideoSummaryTaskVideoResponseBody(TeaModel):
    """Response payload: result code, flat data string, message and request id."""

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('Code', self.code),
            ('Data', self.data),
            ('Message', self.message),
            ('RequestId', self.request_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('code', 'Code'),
            ('data', 'Data'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SaveVideoSummaryTaskVideoResponse(TeaModel):
    """Full API response: transport headers, HTTP status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: SaveVideoSummaryTaskVideoResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three response parts are required; the body is validated recursively."""
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body model is serialized recursively."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a dict, rebuilding the nested body model; return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = SaveVideoSummaryTaskVideoResponseBody().from_map(m['body'])
        return self
class ScanSubDeviceRequest(TeaModel):
    """Parameters for triggering a scan of a device's sub-devices."""

    def __init__(
        self,
        corp_id: str = None,
        device_id: str = None,
    ):
        self.corp_id = corp_id
        self.device_id = device_id

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('CorpId', self.corp_id),
            ('DeviceId', self.device_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('corp_id', 'CorpId'),
            ('device_id', 'DeviceId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ScanSubDeviceResponseBody(TeaModel):
    """Response payload: result code, message and request id (no data section)."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('Code', self.code),
            ('Message', self.message),
            ('RequestId', self.request_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('code', 'Code'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class ScanSubDeviceResponse(TeaModel):
    """Full API response: transport headers, HTTP status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ScanSubDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three response parts are required; the body is validated recursively."""
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body model is serialized recursively."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a dict, rebuilding the nested body model; return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ScanSubDeviceResponseBody().from_map(m['body'])
        return self
class SearchBodyRequest(TeaModel):
    """Parameters for a paged body-image search over a device/time window."""

    def __init__(
        self,
        corp_id: str = None,
        end_time_stamp: int = None,
        gb_id: str = None,
        option_list: Dict[str, Any] = None,
        page_no: int = None,
        page_size: int = None,
        start_time_stamp: int = None,
    ):
        self.corp_id = corp_id
        # End of the search window — presumably an epoch timestamp; TODO confirm unit.
        self.end_time_stamp = end_time_stamp
        self.gb_id = gb_id
        # Extra search options as a free-form mapping.
        self.option_list = option_list
        self.page_no = page_no
        self.page_size = page_size
        # Start of the search window — presumably an epoch timestamp; TODO confirm unit.
        self.start_time_stamp = start_time_stamp

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('CorpId', self.corp_id),
            ('EndTimeStamp', self.end_time_stamp),
            ('GbId', self.gb_id),
            ('OptionList', self.option_list),
            ('PageNo', self.page_no),
            ('PageSize', self.page_size),
            ('StartTimeStamp', self.start_time_stamp),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('corp_id', 'CorpId'),
            ('end_time_stamp', 'EndTimeStamp'),
            ('gb_id', 'GbId'),
            ('option_list', 'OptionList'),
            ('page_no', 'PageNo'),
            ('page_size', 'PageSize'),
            ('start_time_stamp', 'StartTimeStamp'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchBodyShrinkRequest(TeaModel):
    """Shrink variant of SearchBodyRequest: the option map is a pre-serialized string."""

    def __init__(
        self,
        corp_id: str = None,
        end_time_stamp: int = None,
        gb_id: str = None,
        option_list_shrink: str = None,
        page_no: int = None,
        page_size: int = None,
        start_time_stamp: int = None,
    ):
        self.corp_id = corp_id
        self.end_time_stamp = end_time_stamp
        self.gb_id = gb_id
        # Serialized form of OptionList — still mapped to the 'OptionList' wire key.
        self.option_list_shrink = option_list_shrink
        self.page_no = page_no
        self.page_size = page_size
        self.start_time_stamp = start_time_stamp

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('CorpId', self.corp_id),
            ('EndTimeStamp', self.end_time_stamp),
            ('GbId', self.gb_id),
            ('OptionList', self.option_list_shrink),
            ('PageNo', self.page_no),
            ('PageSize', self.page_size),
            ('StartTimeStamp', self.start_time_stamp),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('corp_id', 'CorpId'),
            ('end_time_stamp', 'EndTimeStamp'),
            ('gb_id', 'GbId'),
            ('option_list_shrink', 'OptionList'),
            ('page_no', 'PageNo'),
            ('page_size', 'PageSize'),
            ('start_time_stamp', 'StartTimeStamp'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchBodyResponseBodyDataRecords(TeaModel):
    """A single body-search hit: source image, bounding box and match score."""

    def __init__(
        self,
        gb_id: str = None,
        image_url: str = None,
        left_top_x: float = None,
        left_top_y: float = None,
        right_bottom_x: float = None,
        right_bottom_y: float = None,
        score: float = None,
        target_image_url: str = None,
    ):
        self.gb_id = gb_id
        self.image_url = image_url
        # Bounding-box corners — coordinate system/units not visible here; TODO confirm.
        self.left_top_x = left_top_x
        self.left_top_y = left_top_y
        self.right_bottom_x = right_bottom_x
        self.right_bottom_y = right_bottom_y
        self.score = score
        self.target_image_url = target_image_url

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('GbId', self.gb_id),
            ('ImageUrl', self.image_url),
            ('LeftTopX', self.left_top_x),
            ('LeftTopY', self.left_top_y),
            ('RightBottomX', self.right_bottom_x),
            ('RightBottomY', self.right_bottom_y),
            ('Score', self.score),
            ('TargetImageUrl', self.target_image_url),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('gb_id', 'GbId'),
            ('image_url', 'ImageUrl'),
            ('left_top_x', 'LeftTopX'),
            ('left_top_y', 'LeftTopY'),
            ('right_bottom_x', 'RightBottomX'),
            ('right_bottom_y', 'RightBottomY'),
            ('score', 'Score'),
            ('target_image_url', 'TargetImageUrl'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchBodyResponseBodyData(TeaModel):
    """One page of body-search hits plus paging counters."""

    def __init__(
        self,
        page_no: int = None,
        page_size: int = None,
        records: List[SearchBodyResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_no = page_no
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        """Validate each non-empty record in the list."""
        for rec in self.records or []:
            if rec:
                rec.validate()

    def to_map(self):
        """Serialize to a dict; 'Records' is always emitted (possibly empty)."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        if self.page_no is not None:
            result['PageNo'] = self.page_no
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        result['Records'] = [
            rec.to_map() if rec else None for rec in (self.records or [])
        ]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.total_page is not None:
            result['TotalPage'] = self.total_page
        return result

    def from_map(self, m: dict = None):
        """Populate from a dict, rebuilding each record model; return self."""
        m = m or {}
        if m.get('PageNo') is not None:
            self.page_no = m.get('PageNo')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        self.records = [
            SearchBodyResponseBodyDataRecords().from_map(item)
            for item in (m.get('Records') or [])
        ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        return self
class SearchBodyResponseBody(TeaModel):
    """Response payload: result code/message plus the paged search data."""

    def __init__(
        self,
        code: str = None,
        data: SearchBodyResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Recursively validate the nested data model, if present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a dict using wire-format keys; `data` recurses."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict, rebuilding `data`; return self."""
        m = m or {}
        if m.get('Data') is not None:
            self.data = SearchBodyResponseBodyData().from_map(m['Data'])
        for attr, key in (
            ('code', 'Code'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchBodyResponse(TeaModel):
    """Full API response: transport headers, HTTP status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: SearchBodyResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three response parts are required; the body is validated recursively."""
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body model is serialized recursively."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a dict, rebuilding the nested body model; return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = SearchBodyResponseBody().from_map(m['body'])
        return self
class SearchFaceRequest(TeaModel):
    """Parameters for a paged face-image search over a device/time window."""

    def __init__(
        self,
        corp_id: str = None,
        end_time_stamp: int = None,
        gb_id: str = None,
        option_list: Dict[str, Any] = None,
        page_no: int = None,
        page_size: int = None,
        start_time_stamp: int = None,
    ):
        self.corp_id = corp_id
        # End of the search window — presumably an epoch timestamp; TODO confirm unit.
        self.end_time_stamp = end_time_stamp
        self.gb_id = gb_id
        # Extra search options as a free-form mapping.
        self.option_list = option_list
        self.page_no = page_no
        self.page_size = page_size
        # Start of the search window — presumably an epoch timestamp; TODO confirm unit.
        self.start_time_stamp = start_time_stamp

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('CorpId', self.corp_id),
            ('EndTimeStamp', self.end_time_stamp),
            ('GbId', self.gb_id),
            ('OptionList', self.option_list),
            ('PageNo', self.page_no),
            ('PageSize', self.page_size),
            ('StartTimeStamp', self.start_time_stamp),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('corp_id', 'CorpId'),
            ('end_time_stamp', 'EndTimeStamp'),
            ('gb_id', 'GbId'),
            ('option_list', 'OptionList'),
            ('page_no', 'PageNo'),
            ('page_size', 'PageSize'),
            ('start_time_stamp', 'StartTimeStamp'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchFaceShrinkRequest(TeaModel):
    """Shrink variant of SearchFaceRequest: the option map is a pre-serialized string."""

    def __init__(
        self,
        corp_id: str = None,
        end_time_stamp: int = None,
        gb_id: str = None,
        option_list_shrink: str = None,
        page_no: int = None,
        page_size: int = None,
        start_time_stamp: int = None,
    ):
        self.corp_id = corp_id
        self.end_time_stamp = end_time_stamp
        self.gb_id = gb_id
        # Serialized form of OptionList — still mapped to the 'OptionList' wire key.
        self.option_list_shrink = option_list_shrink
        self.page_no = page_no
        self.page_size = page_size
        self.start_time_stamp = start_time_stamp

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('CorpId', self.corp_id),
            ('EndTimeStamp', self.end_time_stamp),
            ('GbId', self.gb_id),
            ('OptionList', self.option_list_shrink),
            ('PageNo', self.page_no),
            ('PageSize', self.page_size),
            ('StartTimeStamp', self.start_time_stamp),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('corp_id', 'CorpId'),
            ('end_time_stamp', 'EndTimeStamp'),
            ('gb_id', 'GbId'),
            ('option_list_shrink', 'OptionList'),
            ('page_no', 'PageNo'),
            ('page_size', 'PageSize'),
            ('start_time_stamp', 'StartTimeStamp'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchFaceResponseBodyDataRecords(TeaModel):
    """A single face-search hit: source image, bounding box, score and match verdict."""

    def __init__(
        self,
        gb_id: str = None,
        image_url: str = None,
        left_top_x: float = None,
        left_top_y: float = None,
        match_suggestion: str = None,
        right_bottom_x: float = None,
        right_bottom_y: float = None,
        score: float = None,
        source_id: str = None,
        target_image_url: str = None,
    ):
        self.gb_id = gb_id
        self.image_url = image_url
        # Bounding-box corners — coordinate system/units not visible here; TODO confirm.
        self.left_top_x = left_top_x
        self.left_top_y = left_top_y
        self.match_suggestion = match_suggestion
        self.right_bottom_x = right_bottom_x
        self.right_bottom_y = right_bottom_y
        self.score = score
        self.source_id = source_id
        self.target_image_url = target_image_url

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('GbId', self.gb_id),
            ('ImageUrl', self.image_url),
            ('LeftTopX', self.left_top_x),
            ('LeftTopY', self.left_top_y),
            ('MatchSuggestion', self.match_suggestion),
            ('RightBottomX', self.right_bottom_x),
            ('RightBottomY', self.right_bottom_y),
            ('Score', self.score),
            ('SourceId', self.source_id),
            ('TargetImageUrl', self.target_image_url),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('gb_id', 'GbId'),
            ('image_url', 'ImageUrl'),
            ('left_top_x', 'LeftTopX'),
            ('left_top_y', 'LeftTopY'),
            ('match_suggestion', 'MatchSuggestion'),
            ('right_bottom_x', 'RightBottomX'),
            ('right_bottom_y', 'RightBottomY'),
            ('score', 'Score'),
            ('source_id', 'SourceId'),
            ('target_image_url', 'TargetImageUrl'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchFaceResponseBodyData(TeaModel):
    """One page of face-search hits plus paging counters."""

    def __init__(
        self,
        page_no: int = None,
        page_size: int = None,
        records: List[SearchFaceResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_no = page_no
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        """Validate each non-empty record in the list."""
        for rec in self.records or []:
            if rec:
                rec.validate()

    def to_map(self):
        """Serialize to a dict; 'Records' is always emitted (possibly empty)."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        if self.page_no is not None:
            result['PageNo'] = self.page_no
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        result['Records'] = [
            rec.to_map() if rec else None for rec in (self.records or [])
        ]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.total_page is not None:
            result['TotalPage'] = self.total_page
        return result

    def from_map(self, m: dict = None):
        """Populate from a dict, rebuilding each record model; return self."""
        m = m or {}
        if m.get('PageNo') is not None:
            self.page_no = m.get('PageNo')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        self.records = [
            SearchFaceResponseBodyDataRecords().from_map(item)
            for item in (m.get('Records') or [])
        ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        return self
class SearchFaceResponseBody(TeaModel):
    """Response payload: result code/message plus the paged search data."""

    def __init__(
        self,
        code: str = None,
        data: SearchFaceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        """Recursively validate the nested data model, if present."""
        if self.data:
            self.data.validate()

    def to_map(self):
        """Serialize to a dict using wire-format keys; `data` recurses."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict, rebuilding `data`; return self."""
        m = m or {}
        if m.get('Data') is not None:
            self.data = SearchFaceResponseBodyData().from_map(m['Data'])
        for attr, key in (
            ('code', 'Code'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchFaceResponse(TeaModel):
    """Full API response: transport headers, HTTP status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: SearchFaceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        """All three response parts are required; the body is validated recursively."""
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        """Serialize to a plain dict; the body model is serialized recursively."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a dict, rebuilding the nested body model; return self."""
        m = m or {}
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = SearchFaceResponseBody().from_map(m['body'])
        return self
class SearchItemRequest(TeaModel):
    """Parameters for an image-based item search over one or more search tables."""

    def __init__(
        self,
        item_image_data: str = None,
        item_image_url: str = None,
        page_number: int = None,
        page_size: int = None,
        search_table_ids: str = None,
        similarity_threshold: float = None,
    ):
        # Inline image payload — presumably base64-encoded; TODO confirm encoding.
        self.item_image_data = item_image_data
        # Alternative to item_image_data: fetchable URL of the query image.
        self.item_image_url = item_image_url
        self.page_number = page_number
        self.page_size = page_size
        self.search_table_ids = search_table_ids
        # Minimum similarity for a hit to be returned.
        self.similarity_threshold = similarity_threshold

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('ItemImageData', self.item_image_data),
            ('ItemImageUrl', self.item_image_url),
            ('PageNumber', self.page_number),
            ('PageSize', self.page_size),
            ('SearchTableIds', self.search_table_ids),
            ('SimilarityThreshold', self.similarity_threshold),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('item_image_data', 'ItemImageData'),
            ('item_image_url', 'ItemImageUrl'),
            ('page_number', 'PageNumber'),
            ('page_size', 'PageSize'),
            ('search_table_ids', 'SearchTableIds'),
            ('similarity_threshold', 'SimilarityThreshold'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchItemResponseBodyDataRecords(TeaModel):
    """A single item-search hit: item identity, image and similarity score."""

    def __init__(
        self,
        item_id: str = None,
        item_image_similarity: float = None,
        item_image_url: str = None,
        item_name: str = None,
    ):
        self.item_id = item_id
        self.item_image_similarity = item_image_similarity
        self.item_image_url = item_image_url
        self.item_name = item_name

    def validate(self):
        """This model has no required-field constraints."""
        pass

    def to_map(self):
        """Serialize to a dict using wire-format keys; None fields are omitted."""
        _map = super().to_map()
        if _map is not None:
            return _map
        result = {}
        for key, value in (
            ('ItemId', self.item_id),
            ('ItemImageSimilarity', self.item_image_similarity),
            ('ItemImageUrl', self.item_image_url),
            ('ItemName', self.item_name),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate from a wire-format dict and return self."""
        m = m or {}
        for attr, key in (
            ('item_id', 'ItemId'),
            ('item_image_similarity', 'ItemImageSimilarity'),
            ('item_image_url', 'ItemImageUrl'),
            ('item_name', 'ItemName'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchItemResponseBodyData(TeaModel):
    """One page of SearchItem results plus paging counters."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[SearchItemResponseBodyDataRecords] = None,
        total_count: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count

    def validate(self):
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.page_number is not None:
            result['PageNumber'] = self.page_number
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        # NOTE: 'Records' is always emitted (as []) even when records is None —
        # this matches the generated wire contract.
        result['Records'] = []
        if self.records is not None:
            result['Records'] = [rec.to_map() if rec else None for rec in self.records]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        # Reset before (re)populating so stale entries never survive.
        self.records = []
        if m.get('Records') is not None:
            self.records = [
                SearchItemResponseBodyDataRecords().from_map(rec)
                for rec in m.get('Records')
            ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class SearchItemResponseBody(TeaModel):
    """Top-level SearchItem response payload (code/message/data envelope)."""

    def __init__(
        self,
        code: str = None,
        data: SearchItemResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.success = success

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = SearchItemResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Success') is not None:
            self.success = m.get('Success')
        return self
class SearchItemResponse(TeaModel):
    """HTTP-level wrapper for SearchItem: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: SearchItemResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory for a well-formed response.
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = SearchItemResponseBody().from_map(m['body'])
        return self
class SearchObjectRequest(TeaModel):
    """Parameters for the SearchObject API (search captured objects by image/conditions)."""

    # Wire-format key -> attribute name, in serialization order.
    _FIELDS = (
        ('AlgorithmType', 'algorithm_type'),
        ('Conditions', 'conditions'),
        ('CorpId', 'corp_id'),
        ('DeviceList', 'device_list'),
        ('EndTime', 'end_time'),
        ('ImagePath', 'image_path'),
        ('ObjectType', 'object_type'),
        ('PageNumber', 'page_number'),
        ('PageSize', 'page_size'),
        ('PicUrl', 'pic_url'),
        ('StartTime', 'start_time'),
    )

    def __init__(
        self,
        algorithm_type: str = None,
        conditions: Dict[str, Any] = None,
        corp_id: str = None,
        device_list: Dict[str, Any] = None,
        end_time: int = None,
        image_path: Dict[str, Any] = None,
        object_type: str = None,
        page_number: int = None,
        page_size: int = None,
        pic_url: str = None,
        start_time: int = None,
    ):
        self.algorithm_type = algorithm_type
        self.conditions = conditions
        self.corp_id = corp_id
        self.device_list = device_list
        self.end_time = end_time
        self.image_path = image_path
        self.object_type = object_type
        self.page_number = page_number
        self.page_size = page_size
        self.pic_url = pic_url
        self.start_time = start_time

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchObjectShrinkRequest(TeaModel):
    """SearchObject request variant whose map-typed fields are pre-serialized
    ("shrunk") to JSON strings; wire keys are identical to SearchObjectRequest."""

    # Wire-format key -> attribute name, in serialization order.
    _FIELDS = (
        ('AlgorithmType', 'algorithm_type'),
        ('Conditions', 'conditions_shrink'),
        ('CorpId', 'corp_id'),
        ('DeviceList', 'device_list_shrink'),
        ('EndTime', 'end_time'),
        ('ImagePath', 'image_path_shrink'),
        ('ObjectType', 'object_type'),
        ('PageNumber', 'page_number'),
        ('PageSize', 'page_size'),
        ('PicUrl', 'pic_url'),
        ('StartTime', 'start_time'),
    )

    def __init__(
        self,
        algorithm_type: str = None,
        conditions_shrink: str = None,
        corp_id: str = None,
        device_list_shrink: str = None,
        end_time: int = None,
        image_path_shrink: str = None,
        object_type: str = None,
        page_number: int = None,
        page_size: int = None,
        pic_url: str = None,
        start_time: int = None,
    ):
        self.algorithm_type = algorithm_type
        self.conditions_shrink = conditions_shrink
        self.corp_id = corp_id
        self.device_list_shrink = device_list_shrink
        self.end_time = end_time
        self.image_path_shrink = image_path_shrink
        self.object_type = object_type
        self.page_number = page_number
        self.page_size = page_size
        self.pic_url = pic_url
        self.start_time = start_time

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchObjectResponseBodyDataRecords(TeaModel):
    """A single matched capture in a SearchObject result page.

    Note the wire spellings 'DeviceID' and 'SourceID' (upper-case ID),
    unlike most other models in this file.
    """

    # Wire-format key -> attribute name, in serialization order.
    _FIELDS = (
        ('CompareResult', 'compare_result'),
        ('DeviceID', 'device_id'),
        ('LeftTopX', 'left_top_x'),
        ('LeftTopY', 'left_top_y'),
        ('RightBtmX', 'right_btm_x'),
        ('RightBtmY', 'right_btm_y'),
        ('Score', 'score'),
        ('ShotTime', 'shot_time'),
        ('SourceID', 'source_id'),
        ('SourceImagePath', 'source_image_path'),
        ('SourceImageUrl', 'source_image_url'),
        ('TargetImagePath', 'target_image_path'),
        ('TargetImageUrl', 'target_image_url'),
    )

    def __init__(
        self,
        compare_result: str = None,
        device_id: str = None,
        left_top_x: int = None,
        left_top_y: int = None,
        right_btm_x: int = None,
        right_btm_y: int = None,
        score: float = None,
        shot_time: int = None,
        source_id: str = None,
        source_image_path: str = None,
        source_image_url: str = None,
        target_image_path: str = None,
        target_image_url: str = None,
    ):
        self.compare_result = compare_result
        self.device_id = device_id
        self.left_top_x = left_top_x
        self.left_top_y = left_top_y
        self.right_btm_x = right_btm_x
        self.right_btm_y = right_btm_y
        self.score = score
        self.shot_time = shot_time
        self.source_id = source_id
        self.source_image_path = source_image_path
        self.source_image_url = source_image_url
        self.target_image_path = target_image_path
        self.target_image_url = target_image_url

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchObjectResponseBodyData(TeaModel):
    """One page of SearchObject results plus paging counters."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[SearchObjectResponseBodyDataRecords] = None,
        total_count: int = None,
        total_page: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count
        self.total_page = total_page

    def validate(self):
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.page_number is not None:
            result['PageNumber'] = self.page_number
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        # NOTE: 'Records' is always emitted (as []) even when records is None —
        # this matches the generated wire contract.
        result['Records'] = []
        if self.records is not None:
            result['Records'] = [rec.to_map() if rec else None for rec in self.records]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        if self.total_page is not None:
            result['TotalPage'] = self.total_page
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        # Reset before (re)populating so stale entries never survive.
        self.records = []
        if m.get('Records') is not None:
            self.records = [
                SearchObjectResponseBodyDataRecords().from_map(rec)
                for rec in m.get('Records')
            ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        if m.get('TotalPage') is not None:
            self.total_page = m.get('TotalPage')
        return self
class SearchObjectResponseBody(TeaModel):
    """Top-level SearchObject response payload (code/message/data envelope)."""

    def __init__(
        self,
        code: str = None,
        data: SearchObjectResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = SearchObjectResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class SearchObjectResponse(TeaModel):
    """HTTP-level wrapper for SearchObject: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: SearchObjectResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory for a well-formed response.
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = SearchObjectResponseBody().from_map(m['body'])
        return self
class SearchTargetRequest(TeaModel):
    """Parameters for the SearchTarget API (search targets by image/attributes)."""

    # Wire-format key -> attribute name, in serialization order.
    _FIELDS = (
        ('BeginTime', 'begin_time'),
        ('CorpId', 'corp_id'),
        ('DeviceList', 'device_list'),
        ('EndTime', 'end_time'),
        ('ModelId', 'model_id'),
        ('OrderBy', 'order_by'),
        ('PageNumber', 'page_number'),
        ('PageSize', 'page_size'),
        ('SimilarityThreshold', 'similarity_threshold'),
        ('TargetAttributes', 'target_attributes'),
        ('TargetImageData', 'target_image_data'),
        ('TargetImageUrl', 'target_image_url'),
        ('TargetType', 'target_type'),
    )

    def __init__(
        self,
        begin_time: str = None,
        corp_id: str = None,
        device_list: str = None,
        end_time: str = None,
        model_id: str = None,
        order_by: str = None,
        page_number: int = None,
        page_size: int = None,
        similarity_threshold: float = None,
        target_attributes: str = None,
        target_image_data: str = None,
        target_image_url: str = None,
        target_type: str = None,
    ):
        self.begin_time = begin_time
        self.corp_id = corp_id
        self.device_list = device_list
        self.end_time = end_time
        self.model_id = model_id
        self.order_by = order_by
        self.page_number = page_number
        self.page_size = page_size
        self.similarity_threshold = similarity_threshold
        self.target_attributes = target_attributes
        self.target_image_data = target_image_data
        self.target_image_url = target_image_url
        self.target_type = target_type

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchTargetResponseBodyDataRecords(TeaModel):
    """A single matched target in a SearchTarget result page."""

    # Wire-format key -> attribute name, in serialization order.
    _FIELDS = (
        ('DeviceId', 'device_id'),
        ('LeftTopX', 'left_top_x'),
        ('LeftTopY', 'left_top_y'),
        ('RightBottomX', 'right_bottom_x'),
        ('RightBottomY', 'right_bottom_y'),
        ('SourceImageUrl', 'source_image_url'),
        ('TargetAttributes', 'target_attributes'),
        ('TargetImageSimilarity', 'target_image_similarity'),
        ('TargetImageUrl', 'target_image_url'),
        ('TargetType', 'target_type'),
        ('Timestamp', 'timestamp'),
    )

    def __init__(
        self,
        device_id: str = None,
        left_top_x: int = None,
        left_top_y: int = None,
        right_bottom_x: int = None,
        right_bottom_y: int = None,
        source_image_url: str = None,
        target_attributes: str = None,
        target_image_similarity: float = None,
        target_image_url: str = None,
        target_type: str = None,
        timestamp: str = None,
    ):
        self.device_id = device_id
        self.left_top_x = left_top_x
        self.left_top_y = left_top_y
        self.right_bottom_x = right_bottom_x
        self.right_bottom_y = right_bottom_y
        self.source_image_url = source_image_url
        self.target_attributes = target_attributes
        self.target_image_similarity = target_image_similarity
        self.target_image_url = target_image_url
        self.target_type = target_type
        self.timestamp = timestamp

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SearchTargetResponseBodyData(TeaModel):
    """One page of SearchTarget results plus paging counters."""

    def __init__(
        self,
        page_number: int = None,
        page_size: int = None,
        records: List[SearchTargetResponseBodyDataRecords] = None,
        total_count: int = None,
    ):
        self.page_number = page_number
        self.page_size = page_size
        self.records = records
        self.total_count = total_count

    def validate(self):
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.page_number is not None:
            result['PageNumber'] = self.page_number
        if self.page_size is not None:
            result['PageSize'] = self.page_size
        # NOTE: 'Records' is always emitted (as []) even when records is None —
        # this matches the generated wire contract.
        result['Records'] = []
        if self.records is not None:
            result['Records'] = [rec.to_map() if rec else None for rec in self.records]
        if self.total_count is not None:
            result['TotalCount'] = self.total_count
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('PageNumber') is not None:
            self.page_number = m.get('PageNumber')
        if m.get('PageSize') is not None:
            self.page_size = m.get('PageSize')
        # Reset before (re)populating so stale entries never survive.
        self.records = []
        if m.get('Records') is not None:
            self.records = [
                SearchTargetResponseBodyDataRecords().from_map(rec)
                for rec in m.get('Records')
            ]
        if m.get('TotalCount') is not None:
            self.total_count = m.get('TotalCount')
        return self
class SearchTargetResponseBody(TeaModel):
    """Top-level SearchTarget response payload (code/message/data envelope)."""

    def __init__(
        self,
        code: str = None,
        data: SearchTargetResponseBodyData = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.success = success

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        pairs = (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = SearchTargetResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('Success') is not None:
            self.success = m.get('Success')
        return self
class SearchTargetResponse(TeaModel):
    """HTTP-level wrapper for SearchTarget: headers, status code and parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: SearchTargetResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory for a well-formed response.
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = SearchTargetResponseBody().from_map(m['body'])
        return self
class SetAiotStorageInfoRequestEventAlarmMq(TeaModel):
    """Message-queue destination settings for AIoT event/alarm delivery."""

    # Wire-format key -> attribute name, in serialization order.
    _FIELDS = (
        ('AlarmTopic', 'alarm_topic'),
        ('EventTopic', 'event_topic'),
        ('InstanceId', 'instance_id'),
        ('MqType', 'mq_type'),
        ('RamArnRole', 'ram_arn_role'),
        ('RegionId', 'region_id'),
    )

    def __init__(
        self,
        alarm_topic: str = None,
        event_topic: str = None,
        instance_id: str = None,
        mq_type: str = None,
        ram_arn_role: str = None,
        region_id: str = None,
    ):
        self.alarm_topic = alarm_topic
        self.event_topic = event_topic
        self.instance_id = instance_id
        self.mq_type = mq_type
        self.ram_arn_role = ram_arn_role
        self.region_id = region_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SetAiotStorageInfoRequestEventAlarmPictureStorage(TeaModel):
    """Picture-storage destination settings for AIoT event/alarm delivery."""

    # Wire-format key -> attribute name, in serialization order.
    _FIELDS = (
        ('Bucket', 'bucket'),
        ('Endpoint', 'endpoint'),
        ('Path', 'path'),
        ('Proxy', 'proxy'),
        ('RamArnRole', 'ram_arn_role'),
        ('StorageType', 'storage_type'),
    )

    def __init__(
        self,
        bucket: str = None,
        endpoint: str = None,
        path: str = None,
        proxy: str = None,
        ram_arn_role: str = None,
        storage_type: str = None,
    ):
        self.bucket = bucket
        self.endpoint = endpoint
        self.path = path
        self.proxy = proxy
        self.ram_arn_role = ram_arn_role
        self.storage_type = storage_type

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SetAiotStorageInfoRequest(TeaModel):
    """Parameters for SetAiotStorageInfo: MQ and picture-storage configuration."""

    def __init__(
        self,
        event_alarm_mq: SetAiotStorageInfoRequestEventAlarmMq = None,
        event_alarm_picture_storage: SetAiotStorageInfoRequestEventAlarmPictureStorage = None,
    ):
        self.event_alarm_mq = event_alarm_mq
        self.event_alarm_picture_storage = event_alarm_picture_storage

    def validate(self):
        for part in (self.event_alarm_mq, self.event_alarm_picture_storage):
            if part:
                part.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.event_alarm_mq is not None:
            result['EventAlarmMq'] = self.event_alarm_mq.to_map()
        if self.event_alarm_picture_storage is not None:
            result['EventAlarmPictureStorage'] = self.event_alarm_picture_storage.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('EventAlarmMq') is not None:
            self.event_alarm_mq = SetAiotStorageInfoRequestEventAlarmMq().from_map(
                m['EventAlarmMq'])
        if m.get('EventAlarmPictureStorage') is not None:
            self.event_alarm_picture_storage = (
                SetAiotStorageInfoRequestEventAlarmPictureStorage().from_map(
                    m['EventAlarmPictureStorage']))
        return self
class SetAiotStorageInfoShrinkRequest(TeaModel):
    """SetAiotStorageInfo request variant whose nested models are pre-serialized
    ("shrunk") to JSON strings; wire keys match SetAiotStorageInfoRequest."""

    # Wire-format key -> attribute name, in serialization order.
    _FIELDS = (
        ('EventAlarmMq', 'event_alarm_mq_shrink'),
        ('EventAlarmPictureStorage', 'event_alarm_picture_storage_shrink'),
    )

    def __init__(
        self,
        event_alarm_mq_shrink: str = None,
        event_alarm_picture_storage_shrink: str = None,
    ):
        self.event_alarm_mq_shrink = event_alarm_mq_shrink
        self.event_alarm_picture_storage_shrink = event_alarm_picture_storage_shrink

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SetAiotStorageInfoResponseBody(TeaModel):
    """Top-level SetAiotStorageInfo response payload (code/message envelope)."""

    # Wire-format key -> attribute name, in serialization order.
    _FIELDS = (
        ('Code', 'code'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SetAiotStorageInfoResponse(TeaModel):
    """HTTP-level wrapper for SetAiotStorageInfo: headers, status code and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: SetAiotStorageInfoResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # All three parts are mandatory for a well-formed response.
        for value, name in ((self.headers, 'headers'),
                            (self.status_code, 'status_code'),
                            (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = SetAiotStorageInfoResponseBody().from_map(m['body'])
        return self
class SetStreamModeRequest(TeaModel):
    """Parameters for SetStreamMode: set the streaming mode for a list of devices."""

    # Wire-format key -> attribute name, in serialization order.
    _FIELDS = (
        ('DeviceIdList', 'device_id_list'),
        ('StreamMode', 'stream_mode'),
    )

    def __init__(
        self,
        device_id_list: str = None,
        stream_mode: str = None,
    ):
        self.device_id_list = device_id_list
        self.stream_mode = stream_mode

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        return {key: getattr(self, attr)
                for key, attr in self._FIELDS
                if getattr(self, attr) is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SetStreamModeResponseBody(TeaModel):
    """Result envelope for SetStreamMode: code, message and request id."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('Code', self.code), ('Message', self.message), ('RequestId', self.request_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('Code', 'code'), ('Message', 'message'), ('RequestId', 'request_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SetStreamModeResponse(TeaModel):
    """HTTP-level wrapper pairing headers/status with a SetStreamModeResponseBody."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: SetStreamModeResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is mandatory.
        for value, name in ((self.headers, 'headers'), (self.status_code, 'status_code'), (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = SetStreamModeResponseBody().from_map(m['body'])
        return self
class StartModelServiceRequest(TeaModel):
    """Request parameters for StartModelService."""

    def __init__(
        self,
        model_service_id: str = None,
    ):
        self.model_service_id = model_service_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.model_service_id is not None:
            result['ModelServiceId'] = self.model_service_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        value = m.get('ModelServiceId')
        if value is not None:
            self.model_service_id = value
        return self
class StartModelServiceResponseBodyData(TeaModel):
    """Payload of StartModelService: instance id/name and service status."""

    def __init__(
        self,
        model_service_instance_id: str = None,
        model_service_instance_name: int = None,
        model_service_status: str = None,
    ):
        self.model_service_instance_id = model_service_instance_id
        self.model_service_instance_name = model_service_instance_name
        self.model_service_status = model_service_status

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('ModelServiceInstanceId', self.model_service_instance_id),
            ('ModelServiceInstanceName', self.model_service_instance_name),
            ('ModelServiceStatus', self.model_service_status),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        pairs = (
            ('ModelServiceInstanceId', 'model_service_instance_id'),
            ('ModelServiceInstanceName', 'model_service_instance_name'),
            ('ModelServiceStatus', 'model_service_status'),
        )
        for key, attr in pairs:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class StartModelServiceResponseBody(TeaModel):
    """Result envelope for StartModelService with a nested Data model."""

    def __init__(
        self,
        code: str = None,
        data: StartModelServiceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('Code', 'code'), ('Message', 'message'), ('RequestId', 'request_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('Data') is not None:
            self.data = StartModelServiceResponseBodyData().from_map(m['Data'])
        return self
class StartModelServiceResponse(TeaModel):
    """HTTP-level wrapper pairing headers/status with a StartModelServiceResponseBody."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: StartModelServiceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is mandatory.
        for value, name in ((self.headers, 'headers'), (self.status_code, 'status_code'), (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = StartModelServiceResponseBody().from_map(m['body'])
        return self
class StartStreamsRequest(TeaModel):
    """Request parameters for StartStreams."""

    def __init__(
        self,
        description: str = None,
        device_id_list: str = None,
    ):
        self.description = description
        # Global device IDs; multiple allowed, comma-separated.
        self.device_id_list = device_id_list

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('Description', self.description), ('DeviceIdList', self.device_id_list)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('Description', 'description'), ('DeviceIdList', 'device_id_list')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class StartStreamsResponseBody(TeaModel):
    """Result envelope for StartStreams: code, message and request id."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('Code', self.code), ('Message', self.message), ('RequestId', self.request_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('Code', 'code'), ('Message', 'message'), ('RequestId', 'request_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class StartStreamsResponse(TeaModel):
    """HTTP-level wrapper pairing headers/status with a StartStreamsResponseBody."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: StartStreamsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is mandatory.
        for value, name in ((self.headers, 'headers'), (self.status_code, 'status_code'), (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = StartStreamsResponseBody().from_map(m['body'])
        return self
class StopModelServiceRequest(TeaModel):
    """Request parameters for StopModelService."""

    def __init__(
        self,
        model_service_id: str = None,
    ):
        self.model_service_id = model_service_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.model_service_id is not None:
            result['ModelServiceId'] = self.model_service_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        value = m.get('ModelServiceId')
        if value is not None:
            self.model_service_id = value
        return self
class StopModelServiceResponseBodyData(TeaModel):
    """Payload of StopModelService: instance id/name and service status."""

    def __init__(
        self,
        model_service_instance_id: str = None,
        model_service_instance_name: int = None,
        model_service_status: str = None,
    ):
        self.model_service_instance_id = model_service_instance_id
        self.model_service_instance_name = model_service_instance_name
        self.model_service_status = model_service_status

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('ModelServiceInstanceId', self.model_service_instance_id),
            ('ModelServiceInstanceName', self.model_service_instance_name),
            ('ModelServiceStatus', self.model_service_status),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        pairs = (
            ('ModelServiceInstanceId', 'model_service_instance_id'),
            ('ModelServiceInstanceName', 'model_service_instance_name'),
            ('ModelServiceStatus', 'model_service_status'),
        )
        for key, attr in pairs:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class StopModelServiceResponseBody(TeaModel):
    """Result envelope for StopModelService with a nested Data model."""

    def __init__(
        self,
        code: str = None,
        data: StopModelServiceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('Code', 'code'), ('Message', 'message'), ('RequestId', 'request_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('Data') is not None:
            self.data = StopModelServiceResponseBodyData().from_map(m['Data'])
        return self
class StopModelServiceResponse(TeaModel):
    """HTTP-level wrapper pairing headers/status with a StopModelServiceResponseBody."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: StopModelServiceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is mandatory.
        for value, name in ((self.headers, 'headers'), (self.status_code, 'status_code'), (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = StopModelServiceResponseBody().from_map(m['body'])
        return self
class StopMonitorRequest(TeaModel):
    """Request parameters for StopMonitor."""

    def __init__(
        self,
        algorithm_vendor: str = None,
        task_id: str = None,
    ):
        self.algorithm_vendor = algorithm_vendor
        self.task_id = task_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('AlgorithmVendor', self.algorithm_vendor), ('TaskId', self.task_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('AlgorithmVendor', 'algorithm_vendor'), ('TaskId', 'task_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class StopMonitorResponseBody(TeaModel):
    """Result envelope for StopMonitor; Data is a plain string here."""

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('Code', self.code),
            ('Data', self.data),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        pairs = (
            ('Code', 'code'),
            ('Data', 'data'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        )
        for key, attr in pairs:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class StopMonitorResponse(TeaModel):
    """HTTP-level wrapper pairing headers/status with a StopMonitorResponseBody."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: StopMonitorResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is mandatory.
        for value, name in ((self.headers, 'headers'), (self.status_code, 'status_code'), (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = StopMonitorResponseBody().from_map(m['body'])
        return self
class StopStreamsRequest(TeaModel):
    """Request parameters for StopStreams."""

    def __init__(
        self,
        description: str = None,
        device_id_list: str = None,
    ):
        self.description = description
        # Global device IDs; multiple allowed, comma-separated.
        self.device_id_list = device_id_list

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('Description', self.description), ('DeviceIdList', self.device_id_list)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('Description', 'description'), ('DeviceIdList', 'device_id_list')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class StopStreamsResponseBody(TeaModel):
    """Result envelope for StopStreams: code, message and request id."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('Code', self.code), ('Message', self.message), ('RequestId', self.request_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('Code', 'code'), ('Message', 'message'), ('RequestId', 'request_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class StopStreamsResponse(TeaModel):
    """HTTP-level wrapper pairing headers/status with a StopStreamsResponseBody."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: StopStreamsResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is mandatory.
        for value, name in ((self.headers, 'headers'), (self.status_code, 'status_code'), (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = StopStreamsResponseBody().from_map(m['body'])
        return self
class SyncDeviceTimeRequest(TeaModel):
    """Request parameters for SyncDeviceTime."""

    def __init__(
        self,
        device_sn: str = None,
        device_time_stamp: str = None,
    ):
        self.device_sn = device_sn
        self.device_time_stamp = device_time_stamp

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('DeviceSn', self.device_sn), ('DeviceTimeStamp', self.device_time_stamp)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('DeviceSn', 'device_sn'), ('DeviceTimeStamp', 'device_time_stamp')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SyncDeviceTimeResponseBody(TeaModel):
    """Result envelope for SyncDeviceTime, including NTP sync parameters."""

    # Wire key <-> attribute mapping shared by to_map/from_map.
    _FIELDS = (
        ('Code', 'code'),
        ('Message', 'message'),
        ('NTPServer', 'ntpserver'),
        ('RequestId', 'request_id'),
        ('RetryInterval', 'retry_interval'),
        ('SyncInterval', 'sync_interval'),
        ('TimeStamp', 'time_stamp'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        ntpserver: str = None,
        request_id: str = None,
        retry_interval: str = None,
        sync_interval: str = None,
        time_stamp: str = None,
    ):
        self.code = code
        self.message = message
        self.ntpserver = ntpserver
        self.request_id = request_id
        self.retry_interval = retry_interval
        self.sync_interval = sync_interval
        self.time_stamp = time_stamp

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, attr in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class SyncDeviceTimeResponse(TeaModel):
    """HTTP-level wrapper pairing headers/status with a SyncDeviceTimeResponseBody."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: SyncDeviceTimeResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is mandatory.
        for value, name in ((self.headers, 'headers'), (self.status_code, 'status_code'), (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = SyncDeviceTimeResponseBody().from_map(m['body'])
        return self
class TestCrossRequest(TeaModel):
    """Request parameters for TestCross."""

    def __init__(
        self,
        data: str = None,
    ):
        self.data = data

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.data is not None:
            result['Data'] = self.data
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        value = m.get('Data')
        if value is not None:
            self.data = value
        return self
class TestCrossResponse(TeaModel):
    """HTTP-level wrapper for TestCross; the body is a raw string, not a model."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: str = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is mandatory; body has no nested validation.
        for value, name in ((self.headers, 'headers'), (self.status_code, 'status_code'), (self.body, 'body')):
            self.validate_required(value, name)

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('headers', 'headers'), ('statusCode', 'status_code'), ('body', 'body')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UnbindCorpGroupRequest(TeaModel):
    """Request parameters for UnbindCorpGroup."""

    def __init__(
        self,
        corp_group_id: str = None,
        corp_id: str = None,
    ):
        self.corp_group_id = corp_group_id
        self.corp_id = corp_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('CorpGroupId', self.corp_group_id), ('CorpId', self.corp_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('CorpGroupId', 'corp_group_id'), ('CorpId', 'corp_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UnbindCorpGroupResponseBody(TeaModel):
    """Result envelope for UnbindCorpGroup, including a boolean success flag."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.message = message
        self.request_id = request_id
        self.success = success

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('Code', self.code),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        pairs = (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
            ('Success', 'success'),
        )
        for key, attr in pairs:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UnbindCorpGroupResponse(TeaModel):
    """HTTP-level wrapper pairing headers/status with an UnbindCorpGroupResponseBody."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UnbindCorpGroupResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is mandatory.
        for value, name in ((self.headers, 'headers'), (self.status_code, 'status_code'), (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UnbindCorpGroupResponseBody().from_map(m['body'])
        return self
class UnbindDevicesRequest(TeaModel):
    """Request parameters for UnbindDevices."""

    def __init__(
        self,
        corp_id: str = None,
        device_id: str = None,
        sub_device_id_list: str = None,
    ):
        self.corp_id = corp_id
        self.device_id = device_id
        self.sub_device_id_list = sub_device_id_list

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('CorpId', self.corp_id),
            ('DeviceId', self.device_id),
            ('SubDeviceIdList', self.sub_device_id_list),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        pairs = (
            ('CorpId', 'corp_id'),
            ('DeviceId', 'device_id'),
            ('SubDeviceIdList', 'sub_device_id_list'),
        )
        for key, attr in pairs:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UnbindDevicesResponseBodyDataSubDeviceList(TeaModel):
    """Per-sub-device unbind outcome: code, message and the sub-device id."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        sub_device_id: str = None,
    ):
        self.code = code
        self.message = message
        self.sub_device_id = sub_device_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('Code', self.code), ('Message', self.message), ('SubDeviceId', self.sub_device_id)):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('Code', 'code'), ('Message', 'message'), ('SubDeviceId', 'sub_device_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UnbindDevicesResponseBodyData(TeaModel):
    """Container for the list of per-sub-device unbind results."""

    def __init__(
        self,
        sub_device_list: List[UnbindDevicesResponseBodyDataSubDeviceList] = None,
    ):
        self.sub_device_list = sub_device_list

    def validate(self):
        for item in self.sub_device_list or ():
            if item:
                item.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        items = self.sub_device_list
        # NOTE: 'SubDeviceList' is always emitted, even when the list is unset.
        result['SubDeviceList'] = (
            [item.to_map() if item else None for item in items] if items is not None else []
        )
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        raw = m.get('SubDeviceList')
        self.sub_device_list = (
            [UnbindDevicesResponseBodyDataSubDeviceList().from_map(item) for item in raw]
            if raw is not None
            else []
        )
        return self
class UnbindDevicesResponseBody(TeaModel):
    """Result envelope for UnbindDevices with a nested Data model."""

    def __init__(
        self,
        code: str = None,
        data: UnbindDevicesResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        if self.data:
            self.data.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('Code', self.code),
            ('Data', self.data.to_map() if self.data is not None else None),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('Code', 'code'), ('Message', 'message'), ('RequestId', 'request_id')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('Data') is not None:
            self.data = UnbindDevicesResponseBodyData().from_map(m['Data'])
        return self
class UnbindDevicesResponse(TeaModel):
    """HTTP-level wrapper pairing headers/status with an UnbindDevicesResponseBody."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UnbindDevicesResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is mandatory.
        for value, name in ((self.headers, 'headers'), (self.status_code, 'status_code'), (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UnbindDevicesResponseBody().from_map(m['body'])
        return self
class UnbindPersonRequest(TeaModel):
    """Request parameters for UnbindPerson."""

    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
        profile_id: int = None,
    ):
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id
        self.profile_id = profile_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('CorpId', self.corp_id),
            ('IsvSubId', self.isv_sub_id),
            ('ProfileId', self.profile_id),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        pairs = (
            ('CorpId', 'corp_id'),
            ('IsvSubId', 'isv_sub_id'),
            ('ProfileId', 'profile_id'),
        )
        for key, attr in pairs:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UnbindPersonResponseBody(TeaModel):
    """Result envelope for UnbindPerson; Data is a plain boolean."""

    def __init__(
        self,
        code: str = None,
        data: bool = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('Code', self.code),
            ('Data', self.data),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        pairs = (
            ('Code', 'code'),
            ('Data', 'data'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        )
        for key, attr in pairs:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UnbindPersonResponse(TeaModel):
    """HTTP-level wrapper pairing headers/status with an UnbindPersonResponseBody."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UnbindPersonResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every wrapper field is mandatory.
        for value, name in ((self.headers, 'headers'), (self.status_code, 'status_code'), (self.body, 'body')):
            self.validate_required(value, name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        pairs = (
            ('headers', self.headers),
            ('statusCode', self.status_code),
            ('body', self.body.to_map() if self.body is not None else None),
        )
        for key, value in pairs:
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UnbindPersonResponseBody().from_map(m['body'])
        return self
class UnbindUserRequest(TeaModel):
    """Request parameters for the UnbindUser API."""
    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
        user_id: int = None,
    ):
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id
        self.user_id = user_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # Emit each populated attribute under its wire key.
        for attr, key in (
            ('corp_id', 'CorpId'),
            ('isv_sub_id', 'IsvSubId'),
            ('user_id', 'UserId'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('corp_id', 'CorpId'),
            ('isv_sub_id', 'IsvSubId'),
            ('user_id', 'UserId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UnbindUserResponseBody(TeaModel):
    """Body of the UnbindUser API response."""
    def __init__(
        self,
        code: str = None,
        data: bool = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # Emit each populated attribute under its wire key.
        for attr, key in (
            ('code', 'Code'),
            ('data', 'Data'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('code', 'Code'),
            ('data', 'Data'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UnbindUserResponse(TeaModel):
    """Full UnbindUser response: HTTP headers, status code and parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UnbindUserResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every part of the transport envelope is mandatory.
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
        ):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('headers', 'headers'),
            ('status_code', 'statusCode'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('body') is not None:
            self.body = UnbindUserResponseBody().from_map(m['body'])
        return self
class UpdateAiotDeviceRequestAiotDevice(TeaModel):
    """Updatable device fields nested inside an UpdateAiotDevice request."""
    def __init__(
        self,
        ipaddr: str = None,
        latitude: float = None,
        longitude: float = None,
        name: str = None,
        place: str = None,
        port: int = None,
    ):
        self.ipaddr = ipaddr
        self.latitude = latitude
        self.longitude = longitude
        self.name = name
        self.place = place
        self.port = port

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # Emit each populated attribute under its wire key.
        for attr, key in (
            ('ipaddr', 'IPAddr'),
            ('latitude', 'Latitude'),
            ('longitude', 'Longitude'),
            ('name', 'Name'),
            ('place', 'Place'),
            ('port', 'Port'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('ipaddr', 'IPAddr'),
            ('latitude', 'Latitude'),
            ('longitude', 'Longitude'),
            ('name', 'Name'),
            ('place', 'Place'),
            ('port', 'Port'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotDeviceRequest(TeaModel):
    """UpdateAiotDevice request: target record id plus the fields to change."""
    def __init__(
        self,
        aiot_device: UpdateAiotDeviceRequestAiotDevice = None,
        id: str = None,
    ):
        self.aiot_device = aiot_device
        self.id = id

    def validate(self):
        if self.aiot_device:
            self.aiot_device.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        device = self.aiot_device
        if device is not None:
            result['AiotDevice'] = device.to_map()
        if self.id is not None:
            result['Id'] = self.id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        device_map = m.get('AiotDevice')
        if device_map is not None:
            self.aiot_device = UpdateAiotDeviceRequestAiotDevice().from_map(device_map)
        ident = m.get('Id')
        if ident is not None:
            self.id = ident
        return self
class UpdateAiotDeviceShrinkRequest(TeaModel):
    """Shrunken UpdateAiotDevice request: the device payload flattened to a string."""
    def __init__(
        self,
        aiot_device_shrink: str = None,
        id: str = None,
    ):
        self.aiot_device_shrink = aiot_device_shrink
        self.id = id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # Note: the shrunken payload still travels under the 'AiotDevice' key.
        for attr, key in (
            ('aiot_device_shrink', 'AiotDevice'),
            ('id', 'Id'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('aiot_device_shrink', 'AiotDevice'),
            ('id', 'Id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotDeviceResponseBodyAiotDevice(TeaModel):
    """Device record returned in the UpdateAiotDevice response body."""
    def __init__(
        self,
        cap_direction: str = None,
        corp_id: str = None,
        device_id: str = None,
        device_type: str = None,
        firmware_version: str = None,
        ipaddr: str = None,
        ipv6addr: str = None,
        ipv_4gateway: str = None,
        ipv_4netmask: str = None,
        is_online: str = None,
        latitude: float = None,
        longitude: float = None,
        mac: str = None,
        manufacturer: str = None,
        model: str = None,
        monitor_area_desc: str = None,
        monitor_direction: str = None,
        name: str = None,
        org_code: str = None,
        owner_aps_id: str = None,
        password: str = None,
        place: str = None,
        place_code: str = None,
        port: int = None,
        serial_number: str = None,
        user_id: str = None,
    ):
        self.cap_direction = cap_direction
        self.corp_id = corp_id
        self.device_id = device_id
        self.device_type = device_type
        self.firmware_version = firmware_version
        self.ipaddr = ipaddr
        self.ipv6addr = ipv6addr
        self.ipv_4gateway = ipv_4gateway
        self.ipv_4netmask = ipv_4netmask
        self.is_online = is_online
        self.latitude = latitude
        self.longitude = longitude
        self.mac = mac
        self.manufacturer = manufacturer
        self.model = model
        self.monitor_area_desc = monitor_area_desc
        self.monitor_direction = monitor_direction
        self.name = name
        self.org_code = org_code
        self.owner_aps_id = owner_aps_id
        self.password = password
        self.place = place
        self.place_code = place_code
        self.port = port
        self.serial_number = serial_number
        self.user_id = user_id

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # All fields are flat scalars; serialize each populated one under
        # its wire key, in the API's declared key order.
        for attr, key in (
            ('cap_direction', 'CapDirection'),
            ('corp_id', 'CorpId'),
            ('device_id', 'DeviceId'),
            ('device_type', 'DeviceType'),
            ('firmware_version', 'FirmwareVersion'),
            ('ipaddr', 'IPAddr'),
            ('ipv6addr', 'IPV6Addr'),
            ('ipv_4gateway', 'IPv4Gateway'),
            ('ipv_4netmask', 'IPv4Netmask'),
            ('is_online', 'IsOnline'),
            ('latitude', 'Latitude'),
            ('longitude', 'Longitude'),
            ('mac', 'MAC'),
            ('manufacturer', 'Manufacturer'),
            ('model', 'Model'),
            ('monitor_area_desc', 'MonitorAreaDesc'),
            ('monitor_direction', 'MonitorDirection'),
            ('name', 'Name'),
            ('org_code', 'OrgCode'),
            ('owner_aps_id', 'OwnerApsID'),
            ('password', 'Password'),
            ('place', 'Place'),
            ('place_code', 'PlaceCode'),
            ('port', 'Port'),
            ('serial_number', 'SerialNumber'),
            ('user_id', 'UserId'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('cap_direction', 'CapDirection'),
            ('corp_id', 'CorpId'),
            ('device_id', 'DeviceId'),
            ('device_type', 'DeviceType'),
            ('firmware_version', 'FirmwareVersion'),
            ('ipaddr', 'IPAddr'),
            ('ipv6addr', 'IPV6Addr'),
            ('ipv_4gateway', 'IPv4Gateway'),
            ('ipv_4netmask', 'IPv4Netmask'),
            ('is_online', 'IsOnline'),
            ('latitude', 'Latitude'),
            ('longitude', 'Longitude'),
            ('mac', 'MAC'),
            ('manufacturer', 'Manufacturer'),
            ('model', 'Model'),
            ('monitor_area_desc', 'MonitorAreaDesc'),
            ('monitor_direction', 'MonitorDirection'),
            ('name', 'Name'),
            ('org_code', 'OrgCode'),
            ('owner_aps_id', 'OwnerApsID'),
            ('password', 'Password'),
            ('place', 'Place'),
            ('place_code', 'PlaceCode'),
            ('port', 'Port'),
            ('serial_number', 'SerialNumber'),
            ('user_id', 'UserId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotDeviceResponseBody(TeaModel):
    """Body of the UpdateAiotDevice API response."""
    def __init__(
        self,
        aiot_device: UpdateAiotDeviceResponseBodyAiotDevice = None,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.aiot_device = aiot_device
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        if self.aiot_device:
            self.aiot_device.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # Nested model first (matches the declared key order), then scalars.
        if self.aiot_device is not None:
            result['AiotDevice'] = self.aiot_device.to_map()
        for attr, key in (
            ('code', 'Code'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        device_map = m.get('AiotDevice')
        if device_map is not None:
            self.aiot_device = UpdateAiotDeviceResponseBodyAiotDevice().from_map(device_map)
        for attr, key in (
            ('code', 'Code'),
            ('message', 'Message'),
            ('request_id', 'RequestId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotDeviceResponse(TeaModel):
    """Full UpdateAiotDevice response: HTTP headers, status code and parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateAiotDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every part of the transport envelope is mandatory.
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
        ):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('headers', 'headers'),
            ('status_code', 'statusCode'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('body') is not None:
            self.body = UpdateAiotDeviceResponseBody().from_map(m['body'])
        return self
class UpdateAiotPersonTableRequestPersonTable(TeaModel):
    """Person-table fields nested inside an UpdateAiotPersonTable request."""
    def __init__(
        self,
        name: str = None,
        person_table_id: str = None,
        type: int = None,
        verification_model_list: List[int] = None,
    ):
        self.name = name
        self.person_table_id = person_table_id
        self.type = type
        self.verification_model_list = verification_model_list

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # Emit each populated attribute under its wire key.
        for attr, key in (
            ('name', 'Name'),
            ('person_table_id', 'PersonTableId'),
            ('type', 'Type'),
            ('verification_model_list', 'VerificationModelList'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('name', 'Name'),
            ('person_table_id', 'PersonTableId'),
            ('type', 'Type'),
            ('verification_model_list', 'VerificationModelList'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotPersonTableRequest(TeaModel):
    """UpdateAiotPersonTable request: target id plus the table fields to change."""
    def __init__(
        self,
        id: str = None,
        person_table: UpdateAiotPersonTableRequestPersonTable = None,
    ):
        self.id = id
        self.person_table = person_table

    def validate(self):
        if self.person_table:
            self.person_table.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.id is not None:
            result['Id'] = self.id
        table = self.person_table
        if table is not None:
            result['PersonTable'] = table.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        ident = m.get('Id')
        if ident is not None:
            self.id = ident
        table_map = m.get('PersonTable')
        if table_map is not None:
            self.person_table = UpdateAiotPersonTableRequestPersonTable().from_map(table_map)
        return self
class UpdateAiotPersonTableResponseBodyPersonTable(TeaModel):
    """Person-table record returned in the UpdateAiotPersonTable response body."""
    def __init__(
        self,
        device_id: str = None,
        face_num: int = None,
        last_change: str = None,
        name: str = None,
        person_num: int = None,
        person_table_id: str = None,
        total_person_num: int = None,
        type: int = None,
        verification_model_list: List[int] = None,
    ):
        self.device_id = device_id
        self.face_num = face_num
        self.last_change = last_change
        self.name = name
        self.person_num = person_num
        self.person_table_id = person_table_id
        self.total_person_num = total_person_num
        self.type = type
        self.verification_model_list = verification_model_list

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # Emit each populated attribute under its wire key.
        for attr, key in (
            ('device_id', 'DeviceId'),
            ('face_num', 'FaceNum'),
            ('last_change', 'LastChange'),
            ('name', 'Name'),
            ('person_num', 'PersonNum'),
            ('person_table_id', 'PersonTableId'),
            ('total_person_num', 'TotalPersonNum'),
            ('type', 'Type'),
            ('verification_model_list', 'VerificationModelList'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('device_id', 'DeviceId'),
            ('face_num', 'FaceNum'),
            ('last_change', 'LastChange'),
            ('name', 'Name'),
            ('person_num', 'PersonNum'),
            ('person_table_id', 'PersonTableId'),
            ('total_person_num', 'TotalPersonNum'),
            ('type', 'Type'),
            ('verification_model_list', 'VerificationModelList'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotPersonTableResponseBody(TeaModel):
    """Body of the UpdateAiotPersonTable API response."""
    def __init__(
        self,
        code: str = None,
        message: str = None,
        person_table: UpdateAiotPersonTableResponseBodyPersonTable = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        self.person_table = person_table
        # Id of the request
        self.request_id = request_id

    def validate(self):
        if self.person_table:
            self.person_table.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # Scalars and the nested table, in the API's declared key order.
        if self.code is not None:
            result['Code'] = self.code
        if self.message is not None:
            result['Message'] = self.message
        table = self.person_table
        if table is not None:
            result['PersonTable'] = table.to_map()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('code', 'Code'), ('message', 'Message')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        table_map = m.get('PersonTable')
        if table_map is not None:
            self.person_table = UpdateAiotPersonTableResponseBodyPersonTable().from_map(table_map)
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class UpdateAiotPersonTableResponse(TeaModel):
    """Full UpdateAiotPersonTable response: HTTP headers, status code and parsed body."""
    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateAiotPersonTableResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every part of the transport envelope is mandatory.
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (
            ('headers', self.headers),
            ('statusCode', self.status_code),
        ):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('headers', 'headers'),
            ('status_code', 'statusCode'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('body') is not None:
            self.body = UpdateAiotPersonTableResponseBody().from_map(m['body'])
        return self
class UpdateAiotPersonTableItemRequestPersonTableItemIdentificationList(TeaModel):
    """Single identification entry (number + type) in a person-table-item request."""
    def __init__(
        self,
        number: str = None,
        type: int = None,
    ):
        self.number = number
        self.type = type

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('number', 'Number'), ('type', 'Type')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('number', 'Number'), ('type', 'Type')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotPersonTableItemRequestPersonTableItemImageListFeatureInfo(TeaModel):
    """Feature-extraction metadata attached to an image in the request."""
    def __init__(
        self,
        algorithm_type: str = None,
        algorithm_version: str = None,
        feature_data: str = None,
        image_id: str = None,
        object_id: str = None,
        tab_ie_id: str = None,
        vendor: str = None,
    ):
        self.algorithm_type = algorithm_type
        self.algorithm_version = algorithm_version
        self.feature_data = feature_data
        self.image_id = image_id
        self.object_id = object_id
        self.tab_ie_id = tab_ie_id
        self.vendor = vendor

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # Emit each populated attribute under its wire key.
        for attr, key in (
            ('algorithm_type', 'AlgorithmType'),
            ('algorithm_version', 'AlgorithmVersion'),
            ('feature_data', 'FeatureData'),
            ('image_id', 'ImageId'),
            ('object_id', 'ObjectId'),
            ('tab_ie_id', 'TabIeId'),
            ('vendor', 'Vendor'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('algorithm_type', 'AlgorithmType'),
            ('algorithm_version', 'AlgorithmVersion'),
            ('feature_data', 'FeatureData'),
            ('image_id', 'ImageId'),
            ('object_id', 'ObjectId'),
            ('tab_ie_id', 'TabIeId'),
            ('vendor', 'Vendor'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotPersonTableItemRequestPersonTableItemImageList(TeaModel):
    """Single image entry (raw data, metadata and optional feature info) in the request."""
    def __init__(
        self,
        data: str = None,
        device_id: str = None,
        event_sort: str = None,
        feature_info: UpdateAiotPersonTableItemRequestPersonTableItemImageListFeatureInfo = None,
        file_format: str = None,
        height: int = None,
        image_id: str = None,
        shot_time: str = None,
        size: int = None,
        storage_path: str = None,
        type: str = None,
        width: int = None,
    ):
        self.data = data
        self.device_id = device_id
        self.event_sort = event_sort
        self.feature_info = feature_info
        self.file_format = file_format
        self.height = height
        self.image_id = image_id
        self.shot_time = shot_time
        self.size = size
        self.storage_path = storage_path
        self.type = type
        self.width = width

    def validate(self):
        if self.feature_info:
            self.feature_info.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # Scalars before FeatureInfo, then FeatureInfo, then the rest —
        # preserving the API's declared key order.
        for attr, key in (
            ('data', 'Data'),
            ('device_id', 'DeviceId'),
            ('event_sort', 'EventSort'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        if self.feature_info is not None:
            result['FeatureInfo'] = self.feature_info.to_map()
        for attr, key in (
            ('file_format', 'FileFormat'),
            ('height', 'Height'),
            ('image_id', 'ImageId'),
            ('shot_time', 'ShotTime'),
            ('size', 'Size'),
            ('storage_path', 'StoragePath'),
            ('type', 'Type'),
            ('width', 'Width'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('data', 'Data'),
            ('device_id', 'DeviceId'),
            ('event_sort', 'EventSort'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        info_map = m.get('FeatureInfo')
        if info_map is not None:
            self.feature_info = UpdateAiotPersonTableItemRequestPersonTableItemImageListFeatureInfo().from_map(info_map)
        for attr, key in (
            ('file_format', 'FileFormat'),
            ('height', 'Height'),
            ('image_id', 'ImageId'),
            ('shot_time', 'ShotTime'),
            ('size', 'Size'),
            ('storage_path', 'StoragePath'),
            ('type', 'Type'),
            ('width', 'Width'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotPersonTableItemRequestPersonTableItem(TeaModel):
    """Person-table item payload: identity entries, images and person metadata."""
    def __init__(
        self,
        identification_list: List[UpdateAiotPersonTableItemRequestPersonTableItemIdentificationList] = None,
        identification_num: int = None,
        image_list: List[UpdateAiotPersonTableItemRequestPersonTableItemImageList] = None,
        image_num: int = None,
        person_code: str = None,
        person_id: str = None,
        person_name: str = None,
        remarks: str = None,
    ):
        self.identification_list = identification_list
        self.identification_num = identification_num
        self.image_list = image_list
        self.image_num = image_num
        self.person_code = person_code
        self.person_id = person_id
        self.person_name = person_name
        self.remarks = remarks

    def validate(self):
        # Validate every non-null element of both nested lists.
        for item in (self.identification_list or []):
            if item:
                item.validate()
        for item in (self.image_list or []):
            if item:
                item.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # The two list keys are always emitted (possibly empty), matching
        # the generated-SDK wire format.
        result['IdentificationList'] = [
            k.to_map() if k else None for k in (self.identification_list or [])
        ]
        if self.identification_num is not None:
            result['IdentificationNum'] = self.identification_num
        result['ImageList'] = [
            k.to_map() if k else None for k in (self.image_list or [])
        ]
        for attr, key in (
            ('image_num', 'ImageNum'),
            ('person_code', 'PersonCode'),
            ('person_id', 'PersonId'),
            ('person_name', 'PersonName'),
            ('remarks', 'Remarks'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        # List attributes are always reset, then rebuilt from the map.
        self.identification_list = [
            UpdateAiotPersonTableItemRequestPersonTableItemIdentificationList().from_map(k)
            for k in (m.get('IdentificationList') or [])
        ]
        if m.get('IdentificationNum') is not None:
            self.identification_num = m.get('IdentificationNum')
        self.image_list = [
            UpdateAiotPersonTableItemRequestPersonTableItemImageList().from_map(k)
            for k in (m.get('ImageList') or [])
        ]
        for attr, key in (
            ('image_num', 'ImageNum'),
            ('person_code', 'PersonCode'),
            ('person_id', 'PersonId'),
            ('person_name', 'PersonName'),
            ('remarks', 'Remarks'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotPersonTableItemRequest(TeaModel):
    """UpdateAiotPersonTableItem request: ids plus the item payload to change."""
    def __init__(
        self,
        id: str = None,
        person_table_id: str = None,
        person_table_item: UpdateAiotPersonTableItemRequestPersonTableItem = None,
    ):
        self.id = id
        self.person_table_id = person_table_id
        self.person_table_item = person_table_item

    def validate(self):
        if self.person_table_item:
            self.person_table_item.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (
            ('id', 'Id'),
            ('person_table_id', 'PersonTableId'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        if self.person_table_item is not None:
            result['PersonTableItem'] = self.person_table_item.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('id', 'Id'),
            ('person_table_id', 'PersonTableId'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        item_map = m.get('PersonTableItem')
        if item_map is not None:
            self.person_table_item = UpdateAiotPersonTableItemRequestPersonTableItem().from_map(item_map)
        return self
class UpdateAiotPersonTableItemResponseBodyPersonTableItemIdentificationList(TeaModel):
    """Single identification entry (number + type) in the response item."""
    def __init__(
        self,
        number: str = None,
        type: int = None,
    ):
        self.number = number
        self.type = type

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (('number', 'Number'), ('type', 'Type')):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('number', 'Number'), ('type', 'Type')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotPersonTableItemResponseBodyPersonTableItemImageListFeatureInfo(TeaModel):
    """Feature-extraction metadata attached to an image in the response.

    NOTE(review): the wire key here is 'TabIed' while the request-side model
    uses 'TabIeId' — kept as-is, presumably matching the API spec.
    """
    def __init__(
        self,
        algorithm_type: str = None,
        algorithm_version: str = None,
        feature_data: str = None,
        image_id: str = None,
        object_id: str = None,
        tab_ied: str = None,
        vendor: str = None,
    ):
        self.algorithm_type = algorithm_type
        self.algorithm_version = algorithm_version
        self.feature_data = feature_data
        self.image_id = image_id
        self.object_id = object_id
        self.tab_ied = tab_ied
        self.vendor = vendor

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (
            ('algorithm_type', 'AlgorithmType'),
            ('algorithm_version', 'AlgorithmVersion'),
            ('feature_data', 'FeatureData'),
            ('image_id', 'ImageId'),
            ('object_id', 'ObjectId'),
            ('tab_ied', 'TabIed'),
            ('vendor', 'Vendor'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('algorithm_type', 'AlgorithmType'),
            ('algorithm_version', 'AlgorithmVersion'),
            ('feature_data', 'FeatureData'),
            ('image_id', 'ImageId'),
            ('object_id', 'ObjectId'),
            ('tab_ied', 'TabIed'),
            ('vendor', 'Vendor'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotPersonTableItemResponseBodyPersonTableItemImageList(TeaModel):
    """Single image entry (raw data, metadata and optional feature info) in the response."""
    def __init__(
        self,
        data: str = None,
        device_id: str = None,
        event_sort: str = None,
        feature_info: UpdateAiotPersonTableItemResponseBodyPersonTableItemImageListFeatureInfo = None,
        file_format: str = None,
        height: int = None,
        image_id: str = None,
        shot_time: str = None,
        size: int = None,
        storage_path: str = None,
        type: str = None,
        width: int = None,
    ):
        self.data = data
        self.device_id = device_id
        self.event_sort = event_sort
        self.feature_info = feature_info
        self.file_format = file_format
        self.height = height
        self.image_id = image_id
        self.shot_time = shot_time
        self.size = size
        self.storage_path = storage_path
        self.type = type
        self.width = width

    def validate(self):
        if self.feature_info:
            self.feature_info.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # Scalars before FeatureInfo, then FeatureInfo, then the rest —
        # preserving the API's declared key order.
        for attr, key in (
            ('data', 'Data'),
            ('device_id', 'DeviceId'),
            ('event_sort', 'EventSort'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        if self.feature_info is not None:
            result['FeatureInfo'] = self.feature_info.to_map()
        for attr, key in (
            ('file_format', 'FileFormat'),
            ('height', 'Height'),
            ('image_id', 'ImageId'),
            ('shot_time', 'ShotTime'),
            ('size', 'Size'),
            ('storage_path', 'StoragePath'),
            ('type', 'Type'),
            ('width', 'Width'),
        ):
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('data', 'Data'),
            ('device_id', 'DeviceId'),
            ('event_sort', 'EventSort'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        info_map = m.get('FeatureInfo')
        if info_map is not None:
            self.feature_info = UpdateAiotPersonTableItemResponseBodyPersonTableItemImageListFeatureInfo().from_map(info_map)
        for attr, key in (
            ('file_format', 'FileFormat'),
            ('height', 'Height'),
            ('image_id', 'ImageId'),
            ('shot_time', 'ShotTime'),
            ('size', 'Size'),
            ('storage_path', 'StoragePath'),
            ('type', 'Type'),
            ('width', 'Width'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateAiotPersonTableItemResponseBodyPersonTableItem(TeaModel):
    """The person-table item returned by UpdateAiotPersonTableItem (auto-generated).

    ``identification_list`` and ``image_list`` hold nested models and are
    always emitted as lists (possibly empty) on the wire.
    """

    # Scalar (wire key, attribute name) pairs, in wire order.
    _SCALAR_FIELDS = (
        ('IdentificationNum', 'identification_num'),
        ('ImageNum', 'image_num'),
        ('LastChange', 'last_change'),
        ('PersonCode', 'person_code'),
        ('PersonId', 'person_id'),
        ('PersonName', 'person_name'),
        ('PersonTableId', 'person_table_id'),
        ('Remarks', 'remarks'),
    )

    def __init__(
        self,
        identification_list: List[UpdateAiotPersonTableItemResponseBodyPersonTableItemIdentificationList] = None,
        identification_num: int = None,
        image_list: List[UpdateAiotPersonTableItemResponseBodyPersonTableItemImageList] = None,
        image_num: int = None,
        last_change: str = None,
        person_code: str = None,
        person_id: str = None,
        person_name: str = None,
        person_table_id: str = None,
        remarks: str = None,
    ):
        self.identification_list = identification_list
        self.identification_num = identification_num
        self.image_list = image_list
        self.image_num = image_num
        self.last_change = last_change
        self.person_code = person_code
        self.person_id = person_id
        self.person_name = person_name
        self.person_table_id = person_table_id
        self.remarks = remarks

    def validate(self):
        # Recursively validate every non-empty entry of both nested lists.
        for entry in self.identification_list or []:
            if entry:
                entry.validate()
        for entry in self.image_list or []:
            if entry:
                entry.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # List keys are always present, even when the attribute is unset.
        result['IdentificationList'] = [
            entry.to_map() if entry else None
            for entry in (self.identification_list or [])
        ]
        if self.identification_num is not None:
            result['IdentificationNum'] = self.identification_num
        result['ImageList'] = [
            entry.to_map() if entry else None
            for entry in (self.image_list or [])
        ]
        for wire_key, attr_name in self._SCALAR_FIELDS[1:]:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        # Lists are reset on every call, mirroring the wire contract.
        self.identification_list = [
            UpdateAiotPersonTableItemResponseBodyPersonTableItemIdentificationList().from_map(entry)
            for entry in (m.get('IdentificationList') or [])
        ]
        self.image_list = [
            UpdateAiotPersonTableItemResponseBodyPersonTableItemImageList().from_map(entry)
            for entry in (m.get('ImageList') or [])
        ]
        for wire_key, attr_name in self._SCALAR_FIELDS:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateAiotPersonTableItemResponseBody(TeaModel):
    """Response body of UpdateAiotPersonTableItem (auto-generated wire model)."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        person_table_item: UpdateAiotPersonTableItemResponseBodyPersonTableItem = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        self.person_table_item = person_table_item
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        # Only the nested person-table item needs recursive validation.
        if self.person_table_item:
            self.person_table_item.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.code is not None:
            result['Code'] = self.code
        if self.message is not None:
            result['Message'] = self.message
        if self.person_table_item is not None:
            result['PersonTableItem'] = self.person_table_item.to_map()
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in (('Code', 'code'), ('Message', 'message'), ('RequestId', 'request_id')):
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        if m.get('PersonTableItem') is not None:
            self.person_table_item = UpdateAiotPersonTableItemResponseBodyPersonTableItem().from_map(m['PersonTableItem'])
        return self
class UpdateAiotPersonTableItemResponse(TeaModel):
    """HTTP response envelope for UpdateAiotPersonTableItem (auto-generated)."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateAiotPersonTableItemResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every envelope field is mandatory; the body is validated recursively.
        for field_name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, field_name), field_name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('headers', self.headers), ('statusCode', self.status_code)):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UpdateAiotPersonTableItemResponseBody().from_map(m['body'])
        return self
class UpdateAiotVehicleTableItemRequestVehicleTableItem(TeaModel):
    """Vehicle-table item payload of UpdateAiotVehicleTableItem (auto-generated)."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELD_MAP = (
        ('BeginTime', 'begin_time'),
        ('EndTime', 'end_time'),
        ('OwnerName', 'owner_name'),
        ('PhoneNo', 'phone_no'),
        ('PlateNo', 'plate_no'),
        ('Remarks', 'remarks'),
        ('VehicleTableItemId', 'vehicle_table_item_id'),
    )

    def __init__(
        self,
        begin_time: str = None,
        end_time: str = None,
        owner_name: str = None,
        phone_no: str = None,
        plate_no: str = None,
        remarks: str = None,
        vehicle_table_item_id: str = None,
    ):
        self.begin_time = begin_time
        self.end_time = end_time
        self.owner_name = owner_name
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.remarks = remarks
        self.vehicle_table_item_id = vehicle_table_item_id

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateAiotVehicleTableItemRequest(TeaModel):
    """Request model for UpdateAiotVehicleTableItem (auto-generated)."""

    def __init__(
        self,
        id: str = None,
        vehicle_table_id: str = None,
        vehicle_table_item: UpdateAiotVehicleTableItemRequestVehicleTableItem = None,
    ):
        self.id = id
        self.vehicle_table_id = vehicle_table_id
        self.vehicle_table_item = vehicle_table_item

    def validate(self):
        # Only the nested item model needs recursive validation.
        if self.vehicle_table_item:
            self.vehicle_table_item.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.id is not None:
            result['Id'] = self.id
        if self.vehicle_table_id is not None:
            result['VehicleTableId'] = self.vehicle_table_id
        if self.vehicle_table_item is not None:
            result['VehicleTableItem'] = self.vehicle_table_item.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in (('Id', 'id'), ('VehicleTableId', 'vehicle_table_id')):
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        if m.get('VehicleTableItem') is not None:
            self.vehicle_table_item = UpdateAiotVehicleTableItemRequestVehicleTableItem().from_map(m['VehicleTableItem'])
        return self
class UpdateAiotVehicleTableItemShrinkRequest(TeaModel):
    """Shrunk (JSON-stringified item) request for UpdateAiotVehicleTableItem."""

    # (wire key, attribute name) pairs; the shrunk item shares the
    # 'VehicleTableItem' wire key with the non-shrunk request.
    _FIELD_MAP = (
        ('Id', 'id'),
        ('VehicleTableId', 'vehicle_table_id'),
        ('VehicleTableItem', 'vehicle_table_item_shrink'),
    )

    def __init__(
        self,
        id: str = None,
        vehicle_table_id: str = None,
        vehicle_table_item_shrink: str = None,
    ):
        self.id = id
        self.vehicle_table_id = vehicle_table_id
        self.vehicle_table_item_shrink = vehicle_table_item_shrink

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateAiotVehicleTableItemResponseBodyVehicleTableItem(TeaModel):
    """Vehicle-table item echoed back in the UpdateAiotVehicleTableItem response."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELD_MAP = (
        ('BeginTime', 'begin_time'),
        ('EndTime', 'end_time'),
        ('OwnerName', 'owner_name'),
        ('PhoneNo', 'phone_no'),
        ('PlateNo', 'plate_no'),
        ('Remarks', 'remarks'),
        ('VehicleTableId', 'vehicle_table_id'),
        ('VehicleTableItemId', 'vehicle_table_item_id'),
    )

    def __init__(
        self,
        begin_time: str = None,
        end_time: str = None,
        owner_name: str = None,
        phone_no: str = None,
        plate_no: str = None,
        remarks: str = None,
        vehicle_table_id: str = None,
        vehicle_table_item_id: str = None,
    ):
        self.begin_time = begin_time
        self.end_time = end_time
        self.owner_name = owner_name
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.remarks = remarks
        self.vehicle_table_id = vehicle_table_id
        self.vehicle_table_item_id = vehicle_table_item_id

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateAiotVehicleTableItemResponseBody(TeaModel):
    """Response body of UpdateAiotVehicleTableItem (auto-generated wire model)."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        vehicle_table_item: UpdateAiotVehicleTableItemResponseBodyVehicleTableItem = None,
    ):
        self.code = code
        self.message = message
        # Id of the request.
        self.request_id = request_id
        self.vehicle_table_item = vehicle_table_item

    def validate(self):
        # Only the nested item model needs recursive validation.
        if self.vehicle_table_item:
            self.vehicle_table_item.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, value in (
            ('Code', self.code),
            ('Message', self.message),
            ('RequestId', self.request_id),
        ):
            if value is not None:
                result[wire_key] = value
        if self.vehicle_table_item is not None:
            result['VehicleTableItem'] = self.vehicle_table_item.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in (('Code', 'code'), ('Message', 'message'), ('RequestId', 'request_id')):
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        if m.get('VehicleTableItem') is not None:
            self.vehicle_table_item = UpdateAiotVehicleTableItemResponseBodyVehicleTableItem().from_map(m['VehicleTableItem'])
        return self
class UpdateAiotVehicleTableItemResponse(TeaModel):
    """HTTP response envelope for UpdateAiotVehicleTableItem (auto-generated)."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateAiotVehicleTableItemResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every envelope field is mandatory; the body is validated recursively.
        for field_name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, field_name), field_name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('headers', self.headers), ('statusCode', self.status_code)):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UpdateAiotVehicleTableItemResponseBody().from_map(m['body'])
        return self
class UpdateCorpRequest(TeaModel):
    """Request model for UpdateCorp (auto-generated wire model)."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELD_MAP = (
        ('AppName', 'app_name'),
        ('CorpId', 'corp_id'),
        ('CorpName', 'corp_name'),
        ('Description', 'description'),
        ('IconPath', 'icon_path'),
        ('IsvSubId', 'isv_sub_id'),
        ('ParentCorpId', 'parent_corp_id'),
    )

    def __init__(
        self,
        app_name: str = None,
        corp_id: str = None,
        corp_name: str = None,
        description: str = None,
        icon_path: str = None,
        isv_sub_id: str = None,
        parent_corp_id: str = None,
    ):
        self.app_name = app_name
        self.corp_id = corp_id
        self.corp_name = corp_name
        self.description = description
        self.icon_path = icon_path
        self.isv_sub_id = isv_sub_id
        self.parent_corp_id = parent_corp_id

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateCorpResponseBody(TeaModel):
    """Response body of UpdateCorp (auto-generated wire model)."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELD_MAP = (
        ('Code', 'code'),
        ('Data', 'data'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
    )

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateCorpResponse(TeaModel):
    """HTTP response envelope for UpdateCorp (auto-generated)."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateCorpResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every envelope field is mandatory; the body is validated recursively.
        for field_name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, field_name), field_name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('headers', self.headers), ('statusCode', self.status_code)):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UpdateCorpResponseBody().from_map(m['body'])
        return self
class UpdateDataSourceRequest(TeaModel):
    """Request model for UpdateDataSource (auto-generated wire model)."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELD_MAP = (
        ('DataSourceId', 'data_source_id'),
        ('DataSourceName', 'data_source_name'),
        ('Description', 'description'),
        ('Url', 'url'),
    )

    def __init__(
        self,
        data_source_id: str = None,
        data_source_name: str = None,
        description: str = None,
        url: str = None,
    ):
        self.data_source_id = data_source_id
        self.data_source_name = data_source_name
        self.description = description
        self.url = url

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateDataSourceResponseBody(TeaModel):
    """Response body of UpdateDataSource (auto-generated wire model)."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELD_MAP = (
        ('Code', 'code'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.message = message
        self.request_id = request_id

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateDataSourceResponse(TeaModel):
    """HTTP response envelope for UpdateDataSource (auto-generated)."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateDataSourceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every envelope field is mandatory; the body is validated recursively.
        for field_name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, field_name), field_name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('headers', self.headers), ('statusCode', self.status_code)):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UpdateDataSourceResponseBody().from_map(m['body'])
        return self
class UpdateDeviceRequest(TeaModel):
    """Request model for UpdateDevice (auto-generated wire model)."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELD_MAP = (
        ('BitRate', 'bit_rate'),
        ('CorpId', 'corp_id'),
        ('DeviceAddress', 'device_address'),
        ('DeviceDirection', 'device_direction'),
        ('DeviceName', 'device_name'),
        ('DeviceResolution', 'device_resolution'),
        ('DeviceSite', 'device_site'),
        ('DeviceType', 'device_type'),
        ('GbId', 'gb_id'),
        ('Vendor', 'vendor'),
    )

    def __init__(
        self,
        bit_rate: str = None,
        corp_id: str = None,
        device_address: str = None,
        device_direction: str = None,
        device_name: str = None,
        device_resolution: str = None,
        device_site: str = None,
        device_type: str = None,
        gb_id: str = None,
        vendor: str = None,
    ):
        self.bit_rate = bit_rate
        self.corp_id = corp_id
        self.device_address = device_address
        self.device_direction = device_direction
        self.device_name = device_name
        self.device_resolution = device_resolution
        self.device_site = device_site
        self.device_type = device_type
        self.gb_id = gb_id
        self.vendor = vendor

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateDeviceResponseBody(TeaModel):
    """Response body of UpdateDevice (auto-generated wire model)."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELD_MAP = (
        ('Code', 'code'),
        ('Data', 'data'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
    )

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateDeviceResponse(TeaModel):
    """HTTP response envelope for UpdateDevice (auto-generated)."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every envelope field is mandatory; the body is validated recursively.
        for field_name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, field_name), field_name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('headers', self.headers), ('statusCode', self.status_code)):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UpdateDeviceResponseBody().from_map(m['body'])
        return self
class UpdateDeviceCaptureStrategyRequest(TeaModel):
    """Request model for UpdateDeviceCaptureStrategy (auto-generated wire model)."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELD_MAP = (
        ('DeviceCode', 'device_code'),
        ('DeviceType', 'device_type'),
        ('MondayCaptureStrategy', 'monday_capture_strategy'),
    )

    def __init__(
        self,
        device_code: str = None,
        device_type: str = None,
        monday_capture_strategy: str = None,
    ):
        # Device channel code.
        self.device_code = device_code
        # Device type.
        self.device_type = device_type
        # Image capture mode for Mondays.
        self.monday_capture_strategy = monday_capture_strategy

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateDeviceCaptureStrategyResponseBody(TeaModel):
    """Response body of UpdateDeviceCaptureStrategy (auto-generated wire model)."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELD_MAP = (
        ('Code', 'code'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
    ):
        # Response code.
        self.code = code
        # Response message.
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateDeviceCaptureStrategyResponse(TeaModel):
    """HTTP response envelope for UpdateDeviceCaptureStrategy (auto-generated)."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateDeviceCaptureStrategyResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every envelope field is mandatory; the body is validated recursively.
        for field_name in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, field_name), field_name)
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for key, value in (('headers', self.headers), ('statusCode', self.status_code)):
            if value is not None:
                result[key] = value
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UpdateDeviceCaptureStrategyResponseBody().from_map(m['body'])
        return self
class UpdateDevicesStorageRequestUpdateStorageRequestsStorageDays(TeaModel):
    """Retention setting (days per storage type) for UpdateDevicesStorage."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELD_MAP = (
        ('StorageDays', 'storage_days'),
        ('StorageType', 'storage_type'),
    )

    def __init__(
        self,
        storage_days: int = None,
        storage_type: str = None,
    ):
        self.storage_days = storage_days
        self.storage_type = storage_type

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateDevicesStorageRequestUpdateStorageRequests(TeaModel):
    """One per-device-group storage update inside UpdateDevicesStorage."""

    def __init__(
        self,
        device_ids: str = None,
        storage_days: List[UpdateDevicesStorageRequestUpdateStorageRequestsStorageDays] = None,
    ):
        self.device_ids = device_ids
        self.storage_days = storage_days

    def validate(self):
        # Recursively validate every non-empty retention entry.
        for entry in self.storage_days or []:
            if entry:
                entry.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.device_ids is not None:
            result['DeviceIds'] = self.device_ids
        # The list key is always present, even when the attribute is unset.
        result['StorageDays'] = [
            entry.to_map() if entry else None
            for entry in (self.storage_days or [])
        ]
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('DeviceIds') is not None:
            self.device_ids = m.get('DeviceIds')
        # The list is reset on every call, mirroring the wire contract.
        self.storage_days = [
            UpdateDevicesStorageRequestUpdateStorageRequestsStorageDays().from_map(entry)
            for entry in (m.get('StorageDays') or [])
        ]
        return self
class UpdateDevicesStorageRequest(TeaModel):
    """Request model for UpdateDevicesStorage (auto-generated wire model)."""

    def __init__(
        self,
        update_storage_requests: List[UpdateDevicesStorageRequestUpdateStorageRequests] = None,
    ):
        self.update_storage_requests = update_storage_requests

    def validate(self):
        # Recursively validate every non-empty request entry.
        for entry in self.update_storage_requests or []:
            if entry:
                entry.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # The list key is always present, even when the attribute is unset.
        result['UpdateStorageRequests'] = [
            entry.to_map() if entry else None
            for entry in (self.update_storage_requests or [])
        ]
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        # The list is reset on every call, mirroring the wire contract.
        self.update_storage_requests = [
            UpdateDevicesStorageRequestUpdateStorageRequests().from_map(entry)
            for entry in (m.get('UpdateStorageRequests') or [])
        ]
        return self
class UpdateDevicesStorageShrinkRequest(TeaModel):
    """Shrunk (JSON-stringified list) request for UpdateDevicesStorage."""

    def __init__(
        self,
        update_storage_requests_shrink: str = None,
    ):
        self.update_storage_requests_shrink = update_storage_requests_shrink

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        # The shrunk string shares the wire key of the non-shrunk request.
        if self.update_storage_requests_shrink is not None:
            result['UpdateStorageRequests'] = self.update_storage_requests_shrink
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('UpdateStorageRequests') is not None:
            self.update_storage_requests_shrink = m.get('UpdateStorageRequests')
        return self
class UpdateDevicesStorageResponseBody(TeaModel):
    """Response body of UpdateDevicesStorage (auto-generated wire model)."""

    # (wire key, attribute name) pairs, in serialization order.
    _FIELD_MAP = (
        ('Code', 'code'),
        ('Data', 'data'),
        ('Message', 'message'),
        ('RequestId', 'request_id'),
        ('Success', 'success'),
    )

    def __init__(
        self,
        code: str = None,
        data: bool = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request.
        self.request_id = request_id
        self.success = success

    def validate(self):
        # No nested models to validate.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for wire_key, attr_name in self._FIELD_MAP:
            value = getattr(self, attr_name)
            if value is not None:
                result[wire_key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for wire_key, attr_name in self._FIELD_MAP:
            if m.get(wire_key) is not None:
                setattr(self, attr_name, m.get(wire_key))
        return self
class UpdateDevicesStorageResponse(TeaModel):
    """HTTP-level wrapper for UpdateDevicesStorage: headers, status and body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateDevicesStorageResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every part of the wrapper is mandatory; body validates recursively.
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UpdateDevicesStorageResponseBody().from_map(m['body'])
        return self
class UpdateDoubleVerificationGroupRequestDoubleVerificationGroupPersonIdList(TeaModel):
    """One person entry of a double-verification group (request side)."""

    # (attribute, wire key) pairs in serialization order.
    _FIELDS = (
        ('person_id', 'PersonId'),
        ('person_table_id', 'PersonTableId'),
    )

    def __init__(
        self,
        person_id: str = None,
        person_table_id: str = None,
    ):
        self.person_id = person_id
        self.person_table_id = person_table_id

    def validate(self):
        # Plain values only; nothing to validate.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateDoubleVerificationGroupRequestDoubleVerificationGroup(TeaModel):
    """Double-verification group definition carried by the update request."""

    # Plain (attribute, wire key) pairs; PersonIdList is handled separately.
    _FIELDS = (
        ('group_id', 'GroupId'),
        ('interval', 'Interval'),
        ('member_number', 'MemberNumber'),
    )

    def __init__(
        self,
        group_id: str = None,
        interval: int = None,
        member_number: int = None,
        person_id_list: List[UpdateDoubleVerificationGroupRequestDoubleVerificationGroupPersonIdList] = None,
    ):
        self.group_id = group_id
        self.interval = interval
        self.member_number = member_number
        self.person_id_list = person_id_list

    def validate(self):
        # Recursively validate each nested person entry.
        for entry in (self.person_id_list or []):
            if entry:
                entry.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        # The list key is always emitted, even when the list is unset.
        out['PersonIdList'] = []
        if self.person_id_list is not None:
            for entry in self.person_id_list:
                out['PersonIdList'].append(entry.to_map() if entry else None)
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        # Rebuild the nested person list from raw dicts.
        self.person_id_list = []
        if m.get('PersonIdList') is not None:
            for raw in m.get('PersonIdList'):
                self.person_id_list.append(
                    UpdateDoubleVerificationGroupRequestDoubleVerificationGroupPersonIdList().from_map(raw))
        return self
class UpdateDoubleVerificationGroupRequest(TeaModel):
    """Request for UpdateDoubleVerificationGroup: the group payload plus an id."""

    def __init__(
        self,
        double_verification_group: UpdateDoubleVerificationGroupRequestDoubleVerificationGroup = None,
        id: str = None,
    ):
        self.double_verification_group = double_verification_group
        self.id = id

    def validate(self):
        # Only the nested group needs recursive validation.
        if self.double_verification_group:
            self.double_verification_group.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.double_verification_group is not None:
            out['DoubleVerificationGroup'] = self.double_verification_group.to_map()
        if self.id is not None:
            out['Id'] = self.id
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('DoubleVerificationGroup') is not None:
            self.double_verification_group = (
                UpdateDoubleVerificationGroupRequestDoubleVerificationGroup().from_map(
                    m['DoubleVerificationGroup']))
        if m.get('Id') is not None:
            self.id = m.get('Id')
        return self
class UpdateDoubleVerificationGroupResponseBodyDoubleVerificationGroupPersonIdList(TeaModel):
    """One person entry of a double-verification group (response side)."""

    # (attribute, wire key) pairs in serialization order.
    _FIELDS = (
        ('person_id', 'PersonId'),
        ('person_table_id', 'PersonTableId'),
    )

    def __init__(
        self,
        person_id: str = None,
        person_table_id: str = None,
    ):
        self.person_id = person_id
        self.person_table_id = person_table_id

    def validate(self):
        # Plain values only; nothing to validate.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateDoubleVerificationGroupResponseBodyDoubleVerificationGroup(TeaModel):
    """Double-verification group state as echoed back in the response."""

    # Plain (attribute, wire key) pairs; PersonIdList is handled separately.
    _FIELDS = (
        ('device_id', 'DeviceId'),
        ('enabled', 'Enabled'),
        ('group_id', 'GroupId'),
        ('interval', 'Interval'),
        ('last_change', 'LastChange'),
        ('member_number', 'MemberNumber'),
    )

    def __init__(
        self,
        device_id: str = None,
        enabled: str = None,
        group_id: str = None,
        interval: int = None,
        last_change: str = None,
        member_number: int = None,
        person_id_list: List[UpdateDoubleVerificationGroupResponseBodyDoubleVerificationGroupPersonIdList] = None,
    ):
        self.device_id = device_id
        self.enabled = enabled
        self.group_id = group_id
        self.interval = interval
        self.last_change = last_change
        self.member_number = member_number
        self.person_id_list = person_id_list

    def validate(self):
        # Recursively validate each nested person entry.
        for entry in (self.person_id_list or []):
            if entry:
                entry.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        # The list key is always emitted, even when the list is unset.
        out['PersonIdList'] = []
        if self.person_id_list is not None:
            for entry in self.person_id_list:
                out['PersonIdList'].append(entry.to_map() if entry else None)
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        # Rebuild the nested person list from raw dicts.
        self.person_id_list = []
        if m.get('PersonIdList') is not None:
            for raw in m.get('PersonIdList'):
                self.person_id_list.append(
                    UpdateDoubleVerificationGroupResponseBodyDoubleVerificationGroupPersonIdList().from_map(raw))
        return self
class UpdateDoubleVerificationGroupResponseBody(TeaModel):
    """API-level response payload for UpdateDoubleVerificationGroup."""

    def __init__(
        self,
        code: str = None,
        double_verification_group: UpdateDoubleVerificationGroupResponseBodyDoubleVerificationGroup = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.double_verification_group = double_verification_group
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        # Only the nested group needs recursive validation.
        if self.double_verification_group:
            self.double_verification_group.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.double_verification_group is not None:
            out['DoubleVerificationGroup'] = self.double_verification_group.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('DoubleVerificationGroup') is not None:
            self.double_verification_group = (
                UpdateDoubleVerificationGroupResponseBodyDoubleVerificationGroup().from_map(
                    m['DoubleVerificationGroup']))
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class UpdateDoubleVerificationGroupResponse(TeaModel):
    """HTTP-level wrapper for UpdateDoubleVerificationGroup."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateDoubleVerificationGroupResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every part of the wrapper is mandatory; body validates recursively.
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UpdateDoubleVerificationGroupResponseBody().from_map(m['body'])
        return self
class UpdateModelServiceRequest(TeaModel):
    """Request for UpdateModelService."""

    # (attribute, wire key) pairs in serialization order.
    _FIELDS = (
        ('client_token', 'ClientToken'),
        ('model_service_id', 'ModelServiceId'),
        ('model_service_name', 'ModelServiceName'),
        ('qps_required', 'QpsRequired'),
    )

    def __init__(
        self,
        client_token: str = None,
        model_service_id: str = None,
        model_service_name: str = None,
        qps_required: int = None,
    ):
        self.client_token = client_token
        self.model_service_id = model_service_id
        self.model_service_name = model_service_name
        self.qps_required = qps_required

    def validate(self):
        # Plain values only; nothing to validate.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateModelServiceResponseBodyData(TeaModel):
    """Data section of the UpdateModelService response."""

    # (attribute, wire key) pairs in serialization order.
    _FIELDS = (
        ('algorithm_code', 'AlgorithmCode'),
        ('model_service_instance_id', 'ModelServiceInstanceId'),
        ('model_service_instance_name', 'ModelServiceInstanceName'),
        ('model_service_status', 'ModelServiceStatus'),
        ('qps_required', 'QpsRequired'),
    )

    def __init__(
        self,
        algorithm_code: str = None,
        model_service_instance_id: str = None,
        model_service_instance_name: str = None,
        model_service_status: str = None,
        qps_required: int = None,
    ):
        self.algorithm_code = algorithm_code
        self.model_service_instance_id = model_service_instance_id
        self.model_service_instance_name = model_service_instance_name
        self.model_service_status = model_service_status
        self.qps_required = qps_required

    def validate(self):
        # Plain values only; nothing to validate.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateModelServiceResponseBody(TeaModel):
    """API-level response payload for UpdateModelService."""

    def __init__(
        self,
        code: str = None,
        data: UpdateModelServiceResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        # Id of the request
        self.request_id = request_id

    def validate(self):
        # Only the nested data section needs recursive validation.
        if self.data:
            self.data.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = UpdateModelServiceResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class UpdateModelServiceResponse(TeaModel):
    """HTTP-level wrapper for UpdateModelService."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateModelServiceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every part of the wrapper is mandatory; body validates recursively.
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UpdateModelServiceResponseBody().from_map(m['body'])
        return self
class UpdateMonitorRequest(TeaModel):
    """Request for UpdateMonitor: monitor rule, devices, pictures and notifier settings."""

    # (attribute, wire key) pairs in serialization order.
    _FIELDS = (
        ('algorithm_vendor', 'AlgorithmVendor'),
        ('attribute_name', 'AttributeName'),
        ('attribute_operate_type', 'AttributeOperateType'),
        ('attribute_value_list', 'AttributeValueList'),
        ('corp_id', 'CorpId'),
        ('description', 'Description'),
        ('device_list', 'DeviceList'),
        ('device_operate_type', 'DeviceOperateType'),
        ('notifier_app_secret', 'NotifierAppSecret'),
        ('notifier_extend_values', 'NotifierExtendValues'),
        ('notifier_time_out', 'NotifierTimeOut'),
        ('notifier_type', 'NotifierType'),
        ('notifier_url', 'NotifierUrl'),
        ('pic_list', 'PicList'),
        ('pic_operate_type', 'PicOperateType'),
        ('rule_expression', 'RuleExpression'),
        ('rule_name', 'RuleName'),
        ('task_id', 'TaskId'),
    )

    def __init__(
        self,
        algorithm_vendor: str = None,
        attribute_name: str = None,
        attribute_operate_type: str = None,
        attribute_value_list: str = None,
        corp_id: str = None,
        description: str = None,
        device_list: str = None,
        device_operate_type: str = None,
        notifier_app_secret: str = None,
        notifier_extend_values: str = None,
        notifier_time_out: int = None,
        notifier_type: str = None,
        notifier_url: str = None,
        pic_list: str = None,
        pic_operate_type: str = None,
        rule_expression: str = None,
        rule_name: str = None,
        task_id: str = None,
    ):
        self.algorithm_vendor = algorithm_vendor
        self.attribute_name = attribute_name
        self.attribute_operate_type = attribute_operate_type
        self.attribute_value_list = attribute_value_list
        self.corp_id = corp_id
        self.description = description
        self.device_list = device_list
        self.device_operate_type = device_operate_type
        self.notifier_app_secret = notifier_app_secret
        self.notifier_extend_values = notifier_extend_values
        self.notifier_time_out = notifier_time_out
        self.notifier_type = notifier_type
        self.notifier_url = notifier_url
        self.pic_list = pic_list
        self.pic_operate_type = pic_operate_type
        self.rule_expression = rule_expression
        self.rule_name = rule_name
        self.task_id = task_id

    def validate(self):
        # Plain values only; nothing to validate.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateMonitorResponseBody(TeaModel):
    """API-level response payload for UpdateMonitor."""

    # (attribute, wire key) pairs in serialization order.
    _FIELDS = (
        ('code', 'Code'),
        ('data', 'Data'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Plain values only; nothing to validate.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateMonitorResponse(TeaModel):
    """HTTP-level wrapper for UpdateMonitor."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateMonitorResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every part of the wrapper is mandatory; body validates recursively.
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UpdateMonitorResponseBody().from_map(m['body'])
        return self
class UpdateProfileRequest(TeaModel):
    """Request for UpdateProfile: identity and contact fields of one profile."""

    # (attribute, wire key) pairs in serialization order.
    _FIELDS = (
        ('biz_id', 'BizId'),
        ('catalog_id', 'CatalogId'),
        ('corp_id', 'CorpId'),
        ('face_url', 'FaceUrl'),
        ('gender', 'Gender'),
        ('id_number', 'IdNumber'),
        ('isv_sub_id', 'IsvSubId'),
        ('live_address', 'LiveAddress'),
        ('name', 'Name'),
        ('phone_no', 'PhoneNo'),
        ('plate_no', 'PlateNo'),
        ('profile_id', 'ProfileId'),
        ('scene_type', 'SceneType'),
    )

    def __init__(
        self,
        biz_id: str = None,
        catalog_id: int = None,
        corp_id: str = None,
        face_url: str = None,
        gender: int = None,
        id_number: str = None,
        isv_sub_id: str = None,
        live_address: str = None,
        name: str = None,
        phone_no: str = None,
        plate_no: str = None,
        profile_id: int = None,
        scene_type: str = None,
    ):
        self.biz_id = biz_id
        self.catalog_id = catalog_id
        self.corp_id = corp_id
        self.face_url = face_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.live_address = live_address
        self.name = name
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.profile_id = profile_id
        self.scene_type = scene_type

    def validate(self):
        # Plain values only; nothing to validate.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateProfileResponseBody(TeaModel):
    """API-level response payload for UpdateProfile."""

    # (attribute, wire key) pairs in serialization order.
    _FIELDS = (
        ('code', 'Code'),
        ('data', 'Data'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
    )

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Plain values only; nothing to validate.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateProfileResponse(TeaModel):
    """HTTP-level wrapper for UpdateProfile."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateProfileResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every part of the wrapper is mandatory; body validates recursively.
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UpdateProfileResponseBody().from_map(m['body'])
        return self
class UpdateProfileCatalogRequest(TeaModel):
    """Request for UpdateProfileCatalog."""

    # (attribute, wire key) pairs in serialization order.
    _FIELDS = (
        ('catalog_id', 'CatalogId'),
        ('catalog_name', 'CatalogName'),
        ('corp_id', 'CorpId'),
        ('isv_sub_id', 'IsvSubId'),
    )

    def __init__(
        self,
        catalog_id: int = None,
        catalog_name: str = None,
        corp_id: str = None,
        isv_sub_id: str = None,
    ):
        self.catalog_id = catalog_id
        self.catalog_name = catalog_name
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id

    def validate(self):
        # Plain values only; nothing to validate.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateProfileCatalogResponseBodyData(TeaModel):
    """Data section of the UpdateProfileCatalog response."""

    # (attribute, wire key) pairs in serialization order.
    _FIELDS = (
        ('catalog_id', 'CatalogId'),
        ('catalog_name', 'CatalogName'),
        ('isv_sub_id', 'IsvSubId'),
        ('parent_catalog_id', 'ParentCatalogId'),
        ('profile_count', 'ProfileCount'),
    )

    def __init__(
        self,
        catalog_id: int = None,
        catalog_name: str = None,
        isv_sub_id: str = None,
        parent_catalog_id: str = None,
        profile_count: int = None,
    ):
        self.catalog_id = catalog_id
        self.catalog_name = catalog_name
        self.isv_sub_id = isv_sub_id
        self.parent_catalog_id = parent_catalog_id
        self.profile_count = profile_count

    def validate(self):
        # Plain values only; nothing to validate.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateProfileCatalogResponseBody(TeaModel):
    """API-level response payload for UpdateProfileCatalog."""

    def __init__(
        self,
        code: str = None,
        data: UpdateProfileCatalogResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Only the nested data section needs recursive validation.
        if self.data:
            self.data.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.code is not None:
            out['Code'] = self.code
        if self.data is not None:
            out['Data'] = self.data.to_map()
        if self.message is not None:
            out['Message'] = self.message
        if self.request_id is not None:
            out['RequestId'] = self.request_id
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = UpdateProfileCatalogResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class UpdateProfileCatalogResponse(TeaModel):
    """HTTP-level wrapper for UpdateProfileCatalog."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateProfileCatalogResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # Every part of the wrapper is mandatory; body validates recursively.
        for attr in ('headers', 'status_code', 'body'):
            self.validate_required(getattr(self, attr), attr)
        if self.body:
            self.body.validate()

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.status_code is not None:
            out['statusCode'] = self.status_code
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = UpdateProfileCatalogResponseBody().from_map(m['body'])
        return self
class UpdateSearchTableRequest(TeaModel):
    """Request for UpdateSearchTable."""

    # (attribute, wire key) pairs in serialization order.
    _FIELDS = (
        ('search_table_id', 'SearchTableId'),
        ('search_table_name', 'SearchTableName'),
    )

    def __init__(
        self,
        search_table_id: str = None,
        search_table_name: str = None,
    ):
        self.search_table_id = search_table_id
        self.search_table_name = search_table_name

    def validate(self):
        # Plain values only; nothing to validate.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateSearchTableResponseBody(TeaModel):
    """API-level response payload for UpdateSearchTable."""

    # (attribute, wire key) pairs in serialization order.
    _FIELDS = (
        ('code', 'Code'),
        ('message', 'Message'),
        ('request_id', 'RequestId'),
        ('success', 'Success'),
    )

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.message = message
        # Id of the request
        self.request_id = request_id
        self.success = success

    def validate(self):
        # Plain values only; nothing to validate.
        pass

    def to_map(self):
        base = super().to_map()
        if base is not None:
            return base
        out = dict()
        for attr, key in self._FIELDS:
            val = getattr(self, attr)
            if val is not None:
                out[key] = val
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateSearchTableResponse(TeaModel):
    """HTTP envelope: headers, status code and the parsed response body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateSearchTableResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # A well-formed response must carry all three envelope parts.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status_code = m.get('statusCode')
        if status_code is not None:
            self.status_code = status_code
        if m.get('body') is not None:
            self.body = UpdateSearchTableResponseBody().from_map(m['body'])
        return self
class UpdateUserRequest(TeaModel):
    """Request parameters for updating a registered user's profile."""

    def __init__(
        self,
        address: str = None,
        age: int = None,
        attachment: str = None,
        biz_id: str = None,
        corp_id: str = None,
        face_image_content: str = None,
        face_image_url: str = None,
        gender: int = None,
        id_number: str = None,
        isv_sub_id: str = None,
        phone_no: str = None,
        plate_no: str = None,
        user_group_id: int = None,
        user_id: int = None,
        user_name: str = None,
    ):
        self.address = address
        self.age = age
        self.attachment = attachment
        self.biz_id = biz_id
        self.corp_id = corp_id
        self.face_image_content = face_image_content
        self.face_image_url = face_image_url
        self.gender = gender
        self.id_number = id_number
        self.isv_sub_id = isv_sub_id
        self.phone_no = phone_no
        self.plate_no = plate_no
        self.user_group_id = user_group_id
        self.user_id = user_id
        self.user_name = user_name

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('Address', self.address),
            ('Age', self.age),
            ('Attachment', self.attachment),
            ('BizId', self.biz_id),
            ('CorpId', self.corp_id),
            ('FaceImageContent', self.face_image_content),
            ('FaceImageUrl', self.face_image_url),
            ('Gender', self.gender),
            ('IdNumber', self.id_number),
            ('IsvSubId', self.isv_sub_id),
            ('PhoneNo', self.phone_no),
            ('PlateNo', self.plate_no),
            ('UserGroupId', self.user_group_id),
            ('UserId', self.user_id),
            ('UserName', self.user_name),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Address', 'address'),
            ('Age', 'age'),
            ('Attachment', 'attachment'),
            ('BizId', 'biz_id'),
            ('CorpId', 'corp_id'),
            ('FaceImageContent', 'face_image_content'),
            ('FaceImageUrl', 'face_image_url'),
            ('Gender', 'gender'),
            ('IdNumber', 'id_number'),
            ('IsvSubId', 'isv_sub_id'),
            ('PhoneNo', 'phone_no'),
            ('PlateNo', 'plate_no'),
            ('UserGroupId', 'user_group_id'),
            ('UserId', 'user_id'),
            ('UserName', 'user_name'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateUserResponseBody(TeaModel):
    """Payload returned by the UpdateUser API call."""

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('Code', self.code),
            ('Data', self.data),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Code', 'code'),
            ('Data', 'data'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateUserResponse(TeaModel):
    """HTTP envelope: headers, status code and the parsed response body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateUserResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # A well-formed response must carry all three envelope parts.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status_code = m.get('statusCode')
        if status_code is not None:
            self.status_code = status_code
        if m.get('body') is not None:
            self.body = UpdateUserResponseBody().from_map(m['body'])
        return self
class UpdateUserGroupRequest(TeaModel):
    """Request parameters for renaming/updating a user group."""

    def __init__(
        self,
        corp_id: str = None,
        isv_sub_id: str = None,
        user_group_id: int = None,
        user_group_name: str = None,
    ):
        self.corp_id = corp_id
        self.isv_sub_id = isv_sub_id
        self.user_group_id = user_group_id
        self.user_group_name = user_group_name

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('CorpId', self.corp_id),
            ('IsvSubId', self.isv_sub_id),
            ('UserGroupId', self.user_group_id),
            ('UserGroupName', self.user_group_name),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('CorpId', 'corp_id'),
            ('IsvSubId', 'isv_sub_id'),
            ('UserGroupId', 'user_group_id'),
            ('UserGroupName', 'user_group_name'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateUserGroupResponseBodyData(TeaModel):
    """Details of the user group after the update."""

    def __init__(
        self,
        isv_sub_id: str = None,
        parent_user_group_id: str = None,
        user_count: int = None,
        user_group_id: int = None,
        user_group_name: str = None,
    ):
        self.isv_sub_id = isv_sub_id
        self.parent_user_group_id = parent_user_group_id
        self.user_count = user_count
        self.user_group_id = user_group_id
        self.user_group_name = user_group_name

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('IsvSubId', self.isv_sub_id),
            ('ParentUserGroupId', self.parent_user_group_id),
            ('UserCount', self.user_count),
            ('UserGroupId', self.user_group_id),
            ('UserGroupName', self.user_group_name),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('IsvSubId', 'isv_sub_id'),
            ('ParentUserGroupId', 'parent_user_group_id'),
            ('UserCount', 'user_count'),
            ('UserGroupId', 'user_group_id'),
            ('UserGroupName', 'user_group_name'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateUserGroupResponseBody(TeaModel):
    """Payload returned by the UpdateUserGroup API call."""

    def __init__(
        self,
        code: str = None,
        data: UpdateUserGroupResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        # Nested model describing the updated group.
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Only the nested data model needs recursive validation.
        if self.data:
            self.data.validate()

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = UpdateUserGroupResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class UpdateUserGroupResponse(TeaModel):
    """HTTP envelope: headers, status code and the parsed response body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateUserGroupResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # A well-formed response must carry all three envelope parts.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status_code = m.get('statusCode')
        if status_code is not None:
            self.status_code = status_code
        if m.get('body') is not None:
            self.body = UpdateUserGroupResponseBody().from_map(m['body'])
        return self
class UpdateWatchPolicyRequest(TeaModel):
    """Request parameters for updating a watch (monitoring) policy."""

    def __init__(
        self,
        item_match_type: str = None,
        similarity_threshold: float = None,
        target_type: str = None,
        watch_mode: str = None,
        watch_policy_id: str = None,
        watch_policy_name: str = None,
    ):
        self.item_match_type = item_match_type
        self.similarity_threshold = similarity_threshold
        self.target_type = target_type
        self.watch_mode = watch_mode
        self.watch_policy_id = watch_policy_id
        self.watch_policy_name = watch_policy_name

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('ItemMatchType', self.item_match_type),
            ('SimilarityThreshold', self.similarity_threshold),
            ('TargetType', self.target_type),
            ('WatchMode', self.watch_mode),
            ('WatchPolicyId', self.watch_policy_id),
            ('WatchPolicyName', self.watch_policy_name),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('ItemMatchType', 'item_match_type'),
            ('SimilarityThreshold', 'similarity_threshold'),
            ('TargetType', 'target_type'),
            ('WatchMode', 'watch_mode'),
            ('WatchPolicyId', 'watch_policy_id'),
            ('WatchPolicyName', 'watch_policy_name'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateWatchPolicyResponseBody(TeaModel):
    """Payload returned by the UpdateWatchPolicy API call."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.message = message
        # Id of the request.
        self.request_id = request_id
        self.success = success

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('Code', self.code),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
            ('Success', 'success'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateWatchPolicyResponse(TeaModel):
    """HTTP envelope: headers, status code and the parsed response body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateWatchPolicyResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # A well-formed response must carry all three envelope parts.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status_code = m.get('statusCode')
        if status_code is not None:
            self.status_code = status_code
        if m.get('body') is not None:
            self.body = UpdateWatchPolicyResponseBody().from_map(m['body'])
        return self
class UpdateWatchTaskRequest(TeaModel):
    """Request parameters for updating a scheduled watch task."""

    def __init__(
        self,
        description: str = None,
        device_list: str = None,
        message_receiver: str = None,
        schedule_cycle_dates: str = None,
        schedule_times: str = None,
        schedule_type: str = None,
        task_name: str = None,
        watch_policy_ids: str = None,
        watch_task_id: str = None,
    ):
        self.description = description
        self.device_list = device_list
        self.message_receiver = message_receiver
        self.schedule_cycle_dates = schedule_cycle_dates
        self.schedule_times = schedule_times
        self.schedule_type = schedule_type
        self.task_name = task_name
        self.watch_policy_ids = watch_policy_ids
        self.watch_task_id = watch_task_id

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('Description', self.description),
            ('DeviceList', self.device_list),
            ('MessageReceiver', self.message_receiver),
            ('ScheduleCycleDates', self.schedule_cycle_dates),
            ('ScheduleTimes', self.schedule_times),
            ('ScheduleType', self.schedule_type),
            ('TaskName', self.task_name),
            ('WatchPolicyIds', self.watch_policy_ids),
            ('WatchTaskId', self.watch_task_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Description', 'description'),
            ('DeviceList', 'device_list'),
            ('MessageReceiver', 'message_receiver'),
            ('ScheduleCycleDates', 'schedule_cycle_dates'),
            ('ScheduleTimes', 'schedule_times'),
            ('ScheduleType', 'schedule_type'),
            ('TaskName', 'task_name'),
            ('WatchPolicyIds', 'watch_policy_ids'),
            ('WatchTaskId', 'watch_task_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateWatchTaskResponseBody(TeaModel):
    """Payload returned by the UpdateWatchTask API call."""

    def __init__(
        self,
        code: str = None,
        message: str = None,
        request_id: str = None,
        success: bool = None,
    ):
        self.code = code
        self.message = message
        # Id of the request.
        self.request_id = request_id
        self.success = success

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('Code', self.code),
            ('Message', self.message),
            ('RequestId', self.request_id),
            ('Success', self.success),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Code', 'code'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
            ('Success', 'success'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UpdateWatchTaskResponse(TeaModel):
    """HTTP envelope: headers, status code and the parsed response body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UpdateWatchTaskResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # A well-formed response must carry all three envelope parts.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status_code = m.get('statusCode')
        if status_code is not None:
            self.status_code = status_code
        if m.get('body') is not None:
            self.body = UpdateWatchTaskResponseBody().from_map(m['body'])
        return self
class UploadFileRequest(TeaModel):
    """Request parameters for uploading a file to the service."""

    def __init__(
        self,
        corp_id: str = None,
        data_source_id: str = None,
        file_alias_name: str = None,
        file_content: str = None,
        file_name: str = None,
        file_path: str = None,
        file_type: str = None,
        md5: str = None,
    ):
        self.corp_id = corp_id
        self.data_source_id = data_source_id
        self.file_alias_name = file_alias_name
        self.file_content = file_content
        self.file_name = file_name
        self.file_path = file_path
        self.file_type = file_type
        # Checksum of the uploaded content (wire name: 'MD5').
        self.md5 = md5

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('CorpId', self.corp_id),
            ('DataSourceId', self.data_source_id),
            ('FileAliasName', self.file_alias_name),
            ('FileContent', self.file_content),
            ('FileName', self.file_name),
            ('FilePath', self.file_path),
            ('FileType', self.file_type),
            ('MD5', self.md5),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('CorpId', 'corp_id'),
            ('DataSourceId', 'data_source_id'),
            ('FileAliasName', 'file_alias_name'),
            ('FileContent', 'file_content'),
            ('FileName', 'file_name'),
            ('FilePath', 'file_path'),
            ('FileType', 'file_type'),
            ('MD5', 'md5'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UploadFileResponseBodyDataRecords(TeaModel):
    """A single uploaded-file record: its OSS path and source id."""

    def __init__(
        self,
        oss_path: str = None,
        source_id: str = None,
    ):
        self.oss_path = oss_path
        self.source_id = source_id

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('OssPath', self.oss_path),
            ('SourceId', self.source_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('OssPath', 'oss_path'),
            ('SourceId', 'source_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UploadFileResponseBodyData(TeaModel):
    """Container for the list of uploaded-file records."""

    def __init__(
        self,
        records: List[UploadFileResponseBodyDataRecords] = None,
    ):
        self.records = records

    def validate(self):
        # Validate each nested record (list itself may be None).
        for record in self.records or []:
            if record:
                record.validate()

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        # 'Records' is always emitted, even when the list is unset.
        result = {'Records': []}
        if self.records is not None:
            result['Records'] = [
                record.to_map() if record else None for record in self.records
            ]
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        # Reset unconditionally so stale entries never survive a re-parse.
        self.records = []
        records = m.get('Records')
        if records is not None:
            self.records = [
                UploadFileResponseBodyDataRecords().from_map(item)
                for item in records
            ]
        return self
class UploadFileResponseBody(TeaModel):
    """Payload returned by the UploadFile API call."""

    def __init__(
        self,
        code: str = None,
        data: UploadFileResponseBodyData = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        # Nested model holding the uploaded-file records.
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # Only the nested data model needs recursive validation.
        if self.data:
            self.data.validate()

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        if self.data is not None:
            result['Data'] = self.data.to_map()
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        if m.get('Data') is not None:
            self.data = UploadFileResponseBodyData().from_map(m['Data'])
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class UploadFileResponse(TeaModel):
    """HTTP envelope: headers, status code and the parsed response body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UploadFileResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # A well-formed response must carry all three envelope parts.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status_code = m.get('statusCode')
        if status_code is not None:
            self.status_code = status_code
        if m.get('body') is not None:
            self.body = UploadFileResponseBody().from_map(m['body'])
        return self
class UploadImageRequest(TeaModel):
    """Request parameters for uploading an image by URL."""

    def __init__(
        self,
        image_url: str = None,
    ):
        # Publicly reachable URL of the image to upload.
        self.image_url = image_url

    def validate(self):
        # Single optional scalar field; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = {}
        if self.image_url is not None:
            result['ImageUrl'] = self.image_url
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        image_url = m.get('ImageUrl')
        if image_url is not None:
            self.image_url = image_url
        return self
class UploadImageResponseBody(TeaModel):
    """Payload returned by the UploadImage API call."""

    def __init__(
        self,
        code: str = None,
        data: str = None,
        message: str = None,
        request_id: str = None,
    ):
        self.code = code
        self.data = data
        self.message = message
        self.request_id = request_id

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('Code', self.code),
            ('Data', self.data),
            ('Message', self.message),
            ('RequestId', self.request_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Code', 'code'),
            ('Data', 'data'),
            ('Message', 'message'),
            ('RequestId', 'request_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class UploadImageResponse(TeaModel):
    """HTTP envelope: headers, status code and the parsed response body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: UploadImageResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # A well-formed response must carry all three envelope parts.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status_code = m.get('statusCode')
        if status_code is not None:
            self.status_code = status_code
        if m.get('body') is not None:
            self.body = UploadImageResponseBody().from_map(m['body'])
        return self
class VerifyDeviceRequest(TeaModel):
    """Request parameters for verifying device records before import."""

    def __init__(
        self,
        device_address: str = None,
        file_path: str = None,
        nvr_existed: int = None,
    ):
        # Installation address of the device.
        self.device_address = device_address
        # OSS path of the uploaded device sheet.
        self.file_path = file_path
        self.nvr_existed = nvr_existed

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('DeviceAddress', self.device_address),
            ('FilePath', self.file_path),
            ('NvrExisted', self.nvr_existed),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('DeviceAddress', 'device_address'),
            ('FilePath', 'file_path'),
            ('NvrExisted', 'nvr_existed'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class VerifyDeviceResponseBodyData(TeaModel):
    """Per-row verification result for an imported device record."""

    def __init__(
        self,
        code: str = None,
        id: str = None,
        message: str = None,
        original_gb_id: str = None,
        row_number: int = None,
        suggest_gb_id: str = None,
    ):
        # Error code for format errors or GB-ID conflicts: 0 success, -1 failure.
        self.code = code
        # Value of the sequence-number column in the Excel sheet.
        self.id = id
        # Description of the format error or GB-ID conflict.
        self.message = message
        # Original GB (national-standard) ID.
        self.original_gb_id = original_gb_id
        # Row number of this record in the sheet.
        self.row_number = row_number
        # Suggested replacement GB ID.
        self.suggest_gb_id = suggest_gb_id

    def validate(self):
        # All fields are optional scalars; nothing to check.
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        pairs = (
            ('Code', self.code),
            ('Id', self.id),
            ('Message', self.message),
            ('OriginalGbId', self.original_gb_id),
            ('RowNumber', self.row_number),
            ('SuggestGbId', self.suggest_gb_id),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or {}
        for key, attr in (
            ('Code', 'code'),
            ('Id', 'id'),
            ('Message', 'message'),
            ('OriginalGbId', 'original_gb_id'),
            ('RowNumber', 'row_number'),
            ('SuggestGbId', 'suggest_gb_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class VerifyDeviceResponseBody(TeaModel):
    """Payload returned by the VerifyDevice API call."""

    def __init__(
        self,
        code: str = None,
        data: List[VerifyDeviceResponseBodyData] = None,
        message: str = None,
        request_id: str = None,
    ):
        # Response code.
        self.code = code
        self.data = data
        # Description of the response code.
        self.message = message
        # Id of the request.
        self.request_id = request_id

    def validate(self):
        # Validate each nested record (list itself may be None).
        for record in self.data or []:
            if record:
                record.validate()

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = {}
        if self.code is not None:
            result['Code'] = self.code
        # 'Data' is always emitted, even when the list is unset.
        result['Data'] = []
        if self.data is not None:
            result['Data'] = [
                record.to_map() if record else None for record in self.data
            ]
        if self.message is not None:
            result['Message'] = self.message
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        if m.get('Code') is not None:
            self.code = m.get('Code')
        # Reset unconditionally so stale entries never survive a re-parse.
        self.data = []
        data = m.get('Data')
        if data is not None:
            self.data = [
                VerifyDeviceResponseBodyData().from_map(item) for item in data
            ]
        if m.get('Message') is not None:
            self.message = m.get('Message')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        return self
class VerifyDeviceResponse(TeaModel):
    """HTTP envelope: headers, status code and the parsed response body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: VerifyDeviceResponseBody = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        # A well-formed response must carry all three envelope parts.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.status_code, 'status_code')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = {}
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or {}
        headers = m.get('headers')
        if headers is not None:
            self.headers = headers
        status_code = m.get('statusCode')
        if status_code is not None:
            self.status_code = status_code
        if m.get('body') is not None:
            self.body = VerifyDeviceResponseBody().from_map(m['body'])
        return self
|
PypiClean
|
/mutwo.music-0.24.0-py3-none-any.whl/mutwo/music_converters/loudness.py
|
import math
from mutwo_third_party import pydsm
from mutwo import core_constants
from mutwo import core_converters
from mutwo import core_events
from mutwo import music_converters
__all__ = ("LoudnessToAmplitude",)
class LoudnessToAmplitude(core_converters.abc.Converter):
    """Approximate the amplitude needed to reach a perceived loudness.

    :param loudspeaker_frequency_response: Optionally the frequency response
        of the used loudspeaker can be added for balancing out uneven curves
        in the loudspeakers frequency response.  The frequency response is
        defined with a ``core_events.Envelope`` object.
    :type loudspeaker_frequency_response: mutwo.core_events.Envelope
    :param interpolation_order: The interpolation order of the equal loudness
        contour interpolation.
    :type interpolation_order: int

    The converter works best with pure sine waves.
    """

    def __init__(
        self,
        loudspeaker_frequency_response: core_events.Envelope = core_events.Envelope(
            ((0, 80), (2000, 80))
        ),
        interpolation_order: int = 4,
    ):
        self._interpolation_order = interpolation_order
        self._loudspeaker_frequency_response = loudspeaker_frequency_response
        # Cache the mean response once; ``convert`` compares the response at
        # each requested frequency against this average value.
        self._loudspeaker_frequency_response_average = (
            loudspeaker_frequency_response.get_average_value()
        )

    # ###################################################################### #
    #                          static methods                                #
    # ###################################################################### #

    @staticmethod
    def _decibel_to_amplitude_ratio(
        decibel: core_constants.Real, reference_amplitude: core_constants.Real = 1
    ) -> float:
        # dB -> linear amplitude ratio: a = a_ref * 10^(dB / 20)
        ratio = 10 ** (decibel / 20)
        return float(reference_amplitude * ratio)

    @staticmethod
    def _decibel_to_power_ratio(decibel: core_constants.Real) -> float:
        # dB -> linear power ratio: p = 10^(dB / 10)
        return float(10 ** (decibel / 10))

    @staticmethod
    def _sone_to_phon(loudness_in_sone: core_constants.Real) -> core_constants.Real:
        # Formula from http://www.sengpielaudio.com/calculatorSonephon.htm
        if loudness_in_sone < 1:
            return 40 * (loudness_in_sone + 0.0005) ** 0.35
        return 40 + (10 * math.log(loudness_in_sone, 2))

    # ###################################################################### #
    #              public methods for interaction with the user              #
    # ###################################################################### #

    def convert(
        self,
        perceived_loudness_in_sone: core_constants.Real,
        frequency: core_constants.Real,
    ) -> core_constants.Real:
        """Calculate the amplitude needed to reach a loudness at a frequency.

        :param perceived_loudness_in_sone: The subjectively perceived loudness
            that the resulting signal shall have (in the unit `Sone`).
        :type perceived_loudness_in_sone: core_constants.Real
        :param frequency: A frequency in Hertz for which the necessary
            amplitude shall be calculated.
        :return: Return the amplitude for a sine tone to reach the converters
            loudness when played with the entered frequency.

        **Example:**

        >>> from mutwo import music_converters
        >>> loudness_converter = music_converters.LoudnessToAmplitude()
        >>> loudness_converter.convert(1, 200)
        0.009364120303317933
        >>> loudness_converter.convert(1, 50)
        0.15497924558613232
        """
        phon = self._sone_to_phon(perceived_loudness_in_sone)
        contour = pydsm.pydsm.iso226.iso226_spl_itpl(  # type: ignore
            phon, self._interpolation_order
        )
        # (1) Sound pressure level this frequency needs for the requested
        # loudness (equal-loudness contour keeps loudness constant over
        # frequencies).
        required_spl = float(contour(frequency))
        # (2) Balance out the loudspeaker's own (possibly uneven) response:
        # offset by the difference between its average level and its level
        # at this frequency.
        speaker_spl = self._loudspeaker_frequency_response.value_at(frequency)
        corrected_spl = required_spl + (
            self._loudspeaker_frequency_response_average - speaker_spl
        )
        # (3) Convert the dB level to an amplitude ratio relative to the
        # auditory threshold at 1 kHz.
        return self._decibel_to_amplitude_ratio(
            corrected_spl, music_converters.constants.AUDITORY_THRESHOLD_AT_1KHZ
        )
|
PypiClean
|
/gtdb_to_taxdump-0.1.9-py3-none-any.whl/gtdb2td/Graph.py
|
from __future__ import print_function
# batteries
import os
import sys
import gzip
import bz2
import argparse
import logging
import csv
import urllib.request
import codecs
from collections import OrderedDict
class Graph(object):
    """Directed graph of GTDB taxonomy names (parent -> children).

    Each vertex is a taxon name (e.g. ``d__Bacteria``).  Vertices receive
    sequential integer node IDs which are used as taxIDs when dumping
    NCBI-style ``names.dmp``/``nodes.dmp`` files.
    """

    def __init__(self, graph_dict=None):
        """Initialize a graph object.

        :param graph_dict: optional pre-built adjacency mapping
            ``{vertex: [child, ...]}``; an empty graph is created if None.
        """
        # GTDB rank prefix -> NCBI-style rank name; unknown prefixes fall
        # back to 'subspecies' (see get_rank)
        self.__ranks = {'d__' : 'superkingdom',
                        'p__' : 'phylum',
                        'c__' : 'class',
                        'o__' : 'order',
                        'f__' : 'family',
                        'g__' : 'genus',
                        's__' : 'species'}
        if graph_dict is None:   # identity check; '== None' is unidiomatic
            graph_dict = {}
        self.__graph_dict = graph_dict
        self.__graph_nodeIDs = {}   # vertex -> integer node (tax) ID
        self.__seen = {}            # guard against revisits during traversals

    def vertices(self):
        """ returns the vertices of a graph """
        return list(self.__graph_dict.keys())

    def edges(self):
        """ returns the edges of a graph """
        return self.__generate_edges()

    def add_vertex(self, vertex):
        """ If the vertex "vertex" is not in
            self.__graph_dict, a key "vertex" with an empty
            list as a value is added to the dictionary, and the vertex is
            assigned the next sequential node ID.
            Otherwise nothing has to be done.
        """
        if vertex not in self.__graph_dict:
            self.__graph_dict[vertex] = []
            self.__graph_nodeIDs[vertex] = len(self.__graph_nodeIDs.keys())+1

    def add_edge(self, vertex1, vertex2):
        """ Add a directed edge vertex1 -> vertex2.
            Between two vertices can be multiple edges!
            NOTE: an unknown vertex1 is created implicitly but gets no node
            ID (only add_vertex assigns IDs); add vertices first.
        """
        try:
            self.__graph_dict[vertex1].append(vertex2)
        except KeyError:
            self.__graph_dict[vertex1] = [vertex2]

    def __generate_edges(self):
        """ Generate the edges of the graph. Edges are represented as sets
            with one (a loop back to the vertex) or two vertices.
        """
        edges = []
        for vertex in self.__graph_dict:
            for neighbour in self.__graph_dict[vertex]:
                if {neighbour, vertex} not in edges:
                    edges.append({vertex, neighbour})
        return edges

    def __str__(self):
        res = "vertices: "
        for k in self.__graph_dict.keys():
            res += str(k) + " "
        res += "\nvertex UIDs: "
        for k in self.__graph_dict:
            res += str(self.__graph_nodeIDs[k]) + " "
        res += "\nedges: "
        for edge in self.__generate_edges():
            res += str(edge) + " "
        return res

    def get_rank(self, vertex):
        """ Getting rank based on GTDB name prefixes (d__, p__, ...);
            unknown prefixes are treated as 'subspecies'. """
        return self.__ranks.get(vertex[0:3], 'subspecies')

    def iter_graph(self, vertex):
        """ General depth-first iteration of all nodes in the graph,
            printing each parent/child pair once. """
        if vertex == 'root':
            self.__seen = {}
        for child in self.__graph_dict[vertex]:
            if child not in self.__seen:
                print('Parent: {}; Child: {}'.format(vertex, child))
                self.iter_graph(child)
                self.__seen[child] = 1

    def _write_dmp_iter(self, vertex, names, nodes, embl_code='XX'):
        """ Recursively append names.dmp/nodes.dmp rows for all children
            of `vertex` (each node emitted once, guarded by self.__seen). """
        for child in self.__graph_dict[vertex]:
            if child in self.__seen:
                continue
            self.__seen[child] = 1
            # names row: taxID, name, unique name (empty), name class
            names.append([str(self.__graph_nodeIDs[child]), child, '',
                          'scientific name'])
            # nodes row: taxID, parent taxID, rank, embl code + defaults
            rank = self.get_rank(child)
            nodes.append([self.__graph_nodeIDs[child], self.__graph_nodeIDs[vertex],
                          rank, embl_code, 0, 0, 11, 1, 1, 0, 0, 0])
            # recurse into children
            self._write_dmp_iter(child, names, nodes, embl_code)

    def write_dmp(self, outdir='.', embl_code='XX'):
        """ Write NCBI-style names.dmp & nodes.dmp files.

        :param outdir: output directory for the two files
        :param embl_code: EMBL code written to every nodes.dmp row
        :return: (names_file, nodes_file) paths
        """
        names_file = os.path.join(outdir, 'names.dmp')
        nodes_file = os.path.join(outdir, 'nodes.dmp')
        # root records
        ## names
        names = [[str(self.__graph_nodeIDs['root']), 'all', '', 'synonym']]
        names.append([str(self.__graph_nodeIDs['root']), 'root', '', 'scientific name'])
        ## nodes (root's parent is taxID 1, per taxdump convention)
        nodes = [[self.__graph_nodeIDs['root'], 1, 'no rank', embl_code,
                  0, 0, 11, 1, 1, 0, 0, 0]]
        # BUGFIX: reset the traversal guard; previously a prior traversal
        # (e.g. to_tbl/append_tbl) left self.__seen populated and write_dmp
        # silently skipped every child node.
        self.__seen = {}
        ## child names & nodes
        self._write_dmp_iter('root', names, nodes, embl_code)
        # sort by taxID & write
        ## names
        with open(names_file, 'w') as outName:
            for x in sorted(names, key = lambda x: int(x[0])):
                outName.write('\t|\t'.join(x) + '\t|\n')
        ## nodes
        with open(nodes_file, 'w') as outNode:
            for x in sorted(nodes, key = lambda x: x[0]):
                outNode.write('\t|\t'.join([str(xx) for xx in x]) + '\t|\n')
        return names_file, nodes_file

    def _to_tbl_iter(self, vertex):
        """ Recursively print one [taxID, name, rank] row per child. """
        for child in self.__graph_dict[vertex]:
            if child in self.__seen:
                continue
            self.__seen[child] = 1
            # tbl row
            x = [str(self.__graph_nodeIDs[child]), child, self.get_rank(child)]
            print('\t'.join(x))
            # children
            self._to_tbl_iter(child)

    def to_tbl(self):
        """ Print a table of values [taxID, name, rank] to stdout. """
        ## header
        x = ['taxID', 'name', 'rank']
        print('\t'.join(x))
        ## root row (traversal guard reset first)
        self.__seen = {}
        x = [str(self.__graph_nodeIDs['root']), 'root', 'no rank']
        print('\t'.join(x))
        self._to_tbl_iter('root')

    def _to_tbl_iter_idx(self, vertex, idx):
        """ Recursively fill `idx` with {name: taxID-as-string}. """
        for child in self.__graph_dict[vertex]:
            if child in self.__seen:
                continue
            self.__seen[child] = 1
            idx[child] = str(self.__graph_nodeIDs[child])
            self._to_tbl_iter_idx(child, idx)

    def append_tbl(self, table_file, join_column):
        """ Append a 'gtdb_taxid' column to a tab-delimited table.

        :param table_file: path of the input table (first line = header)
        :param join_column: header column whose values are looked up in the
            taxID index; unmatched values get 'NA'
        """
        # build index: name -> taxID
        idx = {}
        self.__seen = {}
        idx['root'] = str(self.__graph_nodeIDs['root'])
        self._to_tbl_iter_idx('root', idx)
        # append to file
        out_file = os.path.splitext(table_file)[0] + '_wTaxIDs.tsv'
        header = OrderedDict()
        with open(table_file) as inF, open(out_file, 'w') as outF:
            for i,line in enumerate(inF):
                line = line.rstrip().split('\t')
                if i == 0:
                    header = {x:i for i,x in enumerate(line)}
                    # BUGFIX: the appended column's 0-based index is the
                    # current number of columns (was len(...) + 1)
                    header['gtdb_taxid'] = len(header)
                    if join_column not in header:
                        msg = 'Cannot find column "{}" in file: {}'
                        raise ValueError(msg.format(join_column, table_file))
                    outF.write('\t'.join(header) + '\n')
                else:
                    acc = line[header[join_column]]
                    try:
                        line.append(idx[acc])
                    except KeyError:
                        msg = 'Cannot find "{}" in the taxID index'
                        logging.info(msg.format(acc))
                        line.append('NA')
                    outF.write('\t'.join(line) + '\n')
        logging.info('File written: {}'.format(out_file))

    def find_all_paths(self, start_vertex, end_vertex, path=None):
        """
        Find all paths from start_vertex to end_vertex in the graph.

        :param path: internal accumulator (do not rely on it as caller);
            a mutable default list was replaced by None to avoid sharing
            state across calls.
        :return: list of paths, each a list of vertices
        """
        if path is None:
            path = []
        graph = self.__graph_dict
        path = path + [start_vertex]
        if start_vertex == end_vertex:
            return [path]
        if start_vertex not in graph:
            return []
        paths = []
        for vertex in graph[start_vertex]:
            if vertex not in path:
                extended_paths = self.find_all_paths(vertex,
                                                     end_vertex,
                                                     path)
                for p in extended_paths:
                    paths.append(p)
        return paths
|
PypiClean
|
/syntaxflow-0.1.0.tar.gz/syntaxflow-0.1.0/CONTRIBUTING.rst
|
.. highlight:: shell
============
Contributing
============
Contributions are welcome, and they are greatly appreciated! Every little bit
helps, and credit will always be given.
You can contribute in many ways:
Types of Contributions
----------------------
Report Bugs
~~~~~~~~~~~
Report bugs at https://github.com/howl-anderson/syntaxflow/issues.
If you are reporting a bug, please include:
* Your operating system name and version.
* Any details about your local setup that might be helpful in troubleshooting.
* Detailed steps to reproduce the bug.
Fix Bugs
~~~~~~~~
Look through the GitHub issues for bugs. Anything tagged with "bug" and "help
wanted" is open to whoever wants to implement it.
Implement Features
~~~~~~~~~~~~~~~~~~
Look through the GitHub issues for features. Anything tagged with "enhancement"
and "help wanted" is open to whoever wants to implement it.
Write Documentation
~~~~~~~~~~~~~~~~~~~
syntaxflow could always use more documentation, whether as part of the
official syntaxflow docs, in docstrings, or even on the web in blog posts,
articles, and such.
Submit Feedback
~~~~~~~~~~~~~~~
The best way to send feedback is to file an issue at https://github.com/howl-anderson/syntaxflow/issues.
If you are proposing a feature:
* Explain in detail how it would work.
* Keep the scope as narrow as possible, to make it easier to implement.
* Remember that this is a volunteer-driven project, and that contributions
are welcome :)
Get Started!
------------
Ready to contribute? Here's how to set up `syntaxflow` for local development.
1. Fork the `syntaxflow` repo on GitHub.
2. Clone your fork locally::
$ git clone [email protected]:your_name_here/syntaxflow.git
3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::
$ mkvirtualenv syntaxflow
$ cd syntaxflow/
$ python setup.py develop
4. Create a branch for local development::
$ git checkout -b name-of-your-bugfix-or-feature
Now you can make your changes locally.
5. When you're done making changes, check that your changes pass flake8 and the
tests, including testing other Python versions with tox::
$ flake8 syntaxflow tests
    $ python setup.py test  # or simply: pytest
$ tox
To get flake8 and tox, just pip install them into your virtualenv.
6. Commit your changes and push your branch to GitHub::
$ git add .
$ git commit -m "Your detailed description of your changes."
$ git push origin name-of-your-bugfix-or-feature
7. Submit a pull request through the GitHub website.
Pull Request Guidelines
-----------------------
Before you submit a pull request, check that it meets these guidelines:
1. The pull request should include tests.
2. If the pull request adds functionality, the docs should be updated. Put
your new functionality into a function with a docstring, and add the
feature to the list in README.rst.
3. The pull request should work for Python 3.5, 3.6, 3.7 and 3.8, and for PyPy. Check
https://travis-ci.org/howl-anderson/syntaxflow/pull_requests
and make sure that the tests pass for all supported Python versions.
Tips
----
To run a subset of tests::
$ pytest tests.test_syntaxflow
Deploying
---------
A reminder for the maintainers on how to deploy.
Make sure all your changes are committed (including an entry in HISTORY.rst).
Then run::
$ bump2version patch # possible: major / minor / patch
$ git push
$ git push --tags
Travis will then deploy to PyPI if tests pass.
|
PypiClean
|
/dsws_client-0.2.0-py3-none-any.whl/dsws_client/client.py
|
import json
import logging
import sys
import urllib.parse
from typing import Any, Dict, List, Optional, Type, TypeVar, Union
import requests
from dsws_client.config import DSWSConfig
from dsws_client.ds_request import (
DSDataRequest,
DSDataType,
DSDate,
DSGetDataBundleRequest,
DSGetDataRequest,
DSGetTokenRequest,
DSInstrument,
DSRequest,
bundle_identifiers,
to_ds_dict,
)
from dsws_client.ds_response import (
DSGetDataBundleResponse,
DSGetDataResponse,
DSGetTokenResponse,
)
from dsws_client.exceptions import (
InvalidResponseError,
RequestFailedError,
)
from dsws_client.parse import ParsedResponse, responses_to_records
from dsws_client.value_objects import DateType, DSStringKVPair
logger = logging.getLogger(__name__)
ResponseCls = TypeVar("ResponseCls")
class DSWSClient:
    """Client for the DSWS web service.

    Handles token caching/refresh, request construction, bundling of large
    identifier lists, and response parsing.
    """

    def __init__(self, username: str, password: str, **kwargs: Any) -> None:
        """
        Initialize the client.

        Args:
            username: DSWS username.
            password: DSWS password.
            **kwargs: Additional keyword arguments passed to the config object.
        """
        config = DSWSConfig(**kwargs)
        self._username = username
        self._password = password
        self._url = urllib.parse.urljoin(config.base_url, config.path)
        self._session = requests.Session()
        # requests expects a scheme -> proxy mapping; route both schemes
        # through the single configured proxy.
        self._proxies = (
            None
            if config.proxy is None
            else {"http": config.proxy, "https": config.proxy}
        )
        self._timeout = config.timeout
        self._ssl_cert = config.ssl_cert
        self._token_response: Optional[DSGetTokenResponse] = None
        self._app_id = config.app_id
        self._data_source = config.data_source
        self._debug = config.debug

    @property
    def token(self) -> str:
        """Get a token, requesting a fresh one when absent or expired."""
        if self._token_response is None or self._token_response.is_expired:
            self._token_response = self.get_token()
        return self._token_response.token_value

    @staticmethod
    def _flatten_responses(
        responses: List[DSGetDataBundleResponse],
    ) -> ParsedResponse:
        """Collect the data responses of all bundles into one record set."""
        data_responses = []
        for response in responses:
            data_responses.extend(response.data_responses)
        return responses_to_records(data_responses)

    def fetch_snapshot_data(
        self,
        identifiers: List[str],
        fields: List[str],
        start: DateType = "",
        tag: Optional[str] = None,
    ) -> ParsedResponse:
        """Fetch snapshot data (kind=0: a single value per identifier/field)."""
        responses = self.fetch_all(
            identifiers=identifiers,
            fields=fields,
            start=start,
            end="",
            frequency=None,
            kind=0,
            tag=tag,
            return_symbol_names=True,
            return_field_names=True,
        )
        return self._flatten_responses(responses)

    def fetch_timeseries_data(  # noqa: PLR0913
        self,
        identifiers: List[str],
        fields: List[str],
        start: DateType = "",
        end: DateType = "",
        frequency: str = "D",
        tag: Optional[str] = None,
    ) -> ParsedResponse:
        """Fetch timeseries data (kind=1) at the given frequency."""
        responses = self.fetch_all(
            identifiers=identifiers,
            fields=fields,
            start=start,
            end=end,
            frequency=frequency,
            kind=1,
            tag=tag,
            return_symbol_names=True,
            return_field_names=True,
        )
        return self._flatten_responses(responses)

    def fetch_one(  # noqa: PLR0913
        self,
        identifiers: Union[str, List[str]],
        fields: List[str],
        start: DateType,
        end: DateType = "",
        frequency: str = "D",
        kind: int = 1,
        tag: Optional[str] = None,
        *,
        return_symbol_names: bool = False,
        return_field_names: bool = False,
        instrument_props: Optional[Dict[str, str]] = None,
        field_props: Optional[Dict[str, str]] = None,
    ) -> DSGetDataResponse:
        """Fetch data from the DSWS web service with a single request.

        Unlike :meth:`fetch_all`, no bundling is done; the identifier list
        must fit in one request.
        """
        request = self.construct_request(
            identifiers=identifiers,
            fields=fields,
            start=start,
            end=end,
            frequency=frequency,
            kind=kind,
            tag=tag,
            return_symbol_names=return_symbol_names,
            return_field_names=return_field_names,
            instrument_props=instrument_props,
            field_props=field_props,
        )
        return self.get_data(request)

    def fetch_all(  # noqa: PLR0913
        self,
        identifiers: List[str],
        fields: List[str],
        start: DateType,
        end: DateType = "",
        frequency: Optional[str] = "D",
        kind: int = 1,
        tag: Optional[str] = None,
        *,
        return_symbol_names: bool = False,
        return_field_names: bool = False,
        instrument_props: Optional[Dict[str, str]] = None,
        field_props: Optional[Dict[str, str]] = None,
    ) -> List[DSGetDataBundleResponse]:
        """Fetch as many bundles as needed to get all items.

        The identifier list is split into service-sized bundles (the limit
        depends on the number of requested data types) and one bundle
        request is posted per split.
        """
        instrument = DSInstrument.construct(
            identifiers,
            return_names=return_symbol_names,
            properties=instrument_props,
        )
        data_types = [
            DSDataType.construct(
                field,
                return_names=return_field_names,
                properties=field_props,
            )
            for field in fields
        ]
        date = DSDate(start, end, frequency, kind)
        identifier_bundles = bundle_identifiers(instrument, len(data_types))
        responses = []
        for identifier_bundle in identifier_bundles:
            # NOTE: the loop variable is deliberately not named `instrument`;
            # the original code shadowed the full instrument built above.
            data_requests = [
                DSDataRequest(bundled_instrument, data_types, date, tag=tag)
                for bundled_instrument in identifier_bundle
            ]
            responses.append(self.get_data_bundle(data_requests))
        return responses

    def get_token(self, **kwargs: Any) -> DSGetTokenResponse:
        """
        Fetch a new token.

        Args:
            **kwargs: Additional properties to set on the request.

        Returns:
            A token response.
        """
        return self._execute_request(
            DSGetTokenRequest(self._username, self._password, properties=kwargs),
            DSGetTokenResponse,
        )

    def get_data(self, data_request: DSDataRequest, **kwargs: Any) -> DSGetDataResponse:
        """
        Post a data request.

        Args:
            data_request: A data request.
            **kwargs: Additional properties to set on the request.

        Returns:
            A data response.
        """
        return self._execute_request(
            DSGetDataRequest(
                token_value=self.token,
                data_request=data_request,
                properties=kwargs,
            ),
            DSGetDataResponse,
        )

    def get_data_bundle(
        self,
        data_requests: List[DSDataRequest],
        **kwargs: Any,
    ) -> DSGetDataBundleResponse:
        """
        Post multiple data requests.

        Args:
            data_requests: A list of data requests.
            **kwargs: Additional properties to set on the request.

        Returns:
            A data bundle response.
        """
        return self._execute_request(
            DSGetDataBundleRequest(
                token_value=self.token,
                data_requests=data_requests,
                properties=kwargs,
            ),
            DSGetDataBundleResponse,
        )

    def construct_request(  # noqa: PLR0913
        self,
        identifiers: Union[str, List[str]],
        fields: List[str],
        start: DateType,
        end: DateType,
        frequency: Optional[str],
        kind: int,
        tag: Optional[str] = None,
        *,
        return_symbol_names: bool = False,
        return_field_names: bool = False,
        instrument_props: Optional[Dict[str, str]] = None,
        field_props: Optional[Dict[str, str]] = None,
    ) -> DSDataRequest:
        """Construct a single (un-bundled) data request."""
        instrument = DSInstrument.construct(
            identifiers,
            return_names=return_symbol_names,
            properties=instrument_props,
        )
        data_types = [
            DSDataType.construct(
                field,
                return_names=return_field_names,
                properties=field_props,
            )
            for field in fields
        ]
        date = DSDate(start, end, frequency, kind)
        return DSDataRequest(instrument, data_types, date, tag)

    def _execute_request(
        self,
        request: DSRequest,
        response_cls: Type[ResponseCls],
    ) -> ResponseCls:
        """Execute a request and deserialize the JSON reply into response_cls.

        Raises:
            RequestFailedError: on a non-2xx HTTP status.
            InvalidResponseError: when the body is not valid JSON.
        """
        # Global per-client properties are appended to every outgoing request.
        if self._app_id is not None:
            request.properties.append(DSStringKVPair("__AppId", self._app_id))
        if self._data_source is not None:
            request.properties.append(DSStringKVPair("Source", self._data_source))
        request_url = urllib.parse.urljoin(self._url, request.path)
        request_dict = to_ds_dict(request)
        if self._debug:
            sys.stdout.write(f"sending request: {request_dict!s}")
        response = self._session.post(
            request_url,
            json=request_dict,
            proxies=self._proxies,
            verify=self._ssl_cert,
            timeout=self._timeout,
        )
        if not response.ok:
            msg = f"request failed: {response.text}"
            raise RequestFailedError(msg, response.status_code)
        try:
            json_response = response.json()
        except json.JSONDecodeError as exc:
            msg = f"invalid response: {response.text}"
            raise InvalidResponseError(msg) from exc
        if self._debug:
            sys.stdout.write(f"received response: {json_response!s}")
        return response_cls(**json_response)
|
PypiClean
|
/pulumi_azure_native-2.5.1a1693590910.tar.gz/pulumi_azure_native-2.5.1a1693590910/pulumi_azure_native/operationalinsights/v20210601/cluster.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['ClusterArgs', 'Cluster']
@pulumi.input_type
class ClusterArgs:
    # NOTE(review): looks auto-generated by the Pulumi Azure Native codegen;
    # @pulumi.input_type introspects the __init__ signature and the
    # property definitions below, so the structure must be kept intact.
    def __init__(__self__, *,
                 resource_group_name: pulumi.Input[str],
                 billing_type: Optional[pulumi.Input[Union[str, 'BillingType']]] = None,
                 cluster_name: Optional[pulumi.Input[str]] = None,
                 identity: Optional[pulumi.Input['IdentityArgs']] = None,
                 is_availability_zones_enabled: Optional[pulumi.Input[bool]] = None,
                 is_double_encryption_enabled: Optional[pulumi.Input[bool]] = None,
                 key_vault_properties: Optional[pulumi.Input['KeyVaultPropertiesArgs']] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 sku: Optional[pulumi.Input['ClusterSkuArgs']] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a Cluster resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
        :param pulumi.Input[Union[str, 'BillingType']] billing_type: The cluster's billing type.
        :param pulumi.Input[str] cluster_name: The name of the Log Analytics cluster.
        :param pulumi.Input['IdentityArgs'] identity: The identity of the resource.
        :param pulumi.Input[bool] is_availability_zones_enabled: Sets whether the cluster will support availability zones. This can be set as true only in regions where Azure Data Explorer support Availability Zones. This Property can not be modified after cluster creation. Default value is 'true' if region supports Availability Zones.
        :param pulumi.Input[bool] is_double_encryption_enabled: Configures whether cluster will use double encryption. This Property can not be modified after cluster creation. Default value is 'true'
        :param pulumi.Input['KeyVaultPropertiesArgs'] key_vault_properties: The associated key properties.
        :param pulumi.Input[str] location: The geo-location where the resource lives
        :param pulumi.Input['ClusterSkuArgs'] sku: The sku properties.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
        """
        # The required argument is always registered; each optional argument
        # is registered only when the caller supplied a non-None value.
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        if billing_type is not None:
            pulumi.set(__self__, "billing_type", billing_type)
        if cluster_name is not None:
            pulumi.set(__self__, "cluster_name", cluster_name)
        if identity is not None:
            pulumi.set(__self__, "identity", identity)
        if is_availability_zones_enabled is not None:
            pulumi.set(__self__, "is_availability_zones_enabled", is_availability_zones_enabled)
        if is_double_encryption_enabled is not None:
            pulumi.set(__self__, "is_double_encryption_enabled", is_double_encryption_enabled)
        if key_vault_properties is not None:
            pulumi.set(__self__, "key_vault_properties", key_vault_properties)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if sku is not None:
            pulumi.set(__self__, "sku", sku)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
    # Each property below maps the snake_case Python attribute to its
    # camelCase wire name via @pulumi.getter(name=...); values are stored
    # and retrieved through pulumi.set/pulumi.get rather than plain
    # instance attributes.
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group. The name is case insensitive.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="billingType")
    def billing_type(self) -> Optional[pulumi.Input[Union[str, 'BillingType']]]:
        """
        The cluster's billing type.
        """
        return pulumi.get(self, "billing_type")
    @billing_type.setter
    def billing_type(self, value: Optional[pulumi.Input[Union[str, 'BillingType']]]):
        pulumi.set(self, "billing_type", value)
    @property
    @pulumi.getter(name="clusterName")
    def cluster_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Log Analytics cluster.
        """
        return pulumi.get(self, "cluster_name")
    @cluster_name.setter
    def cluster_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cluster_name", value)
    @property
    @pulumi.getter
    def identity(self) -> Optional[pulumi.Input['IdentityArgs']]:
        """
        The identity of the resource.
        """
        return pulumi.get(self, "identity")
    @identity.setter
    def identity(self, value: Optional[pulumi.Input['IdentityArgs']]):
        pulumi.set(self, "identity", value)
    @property
    @pulumi.getter(name="isAvailabilityZonesEnabled")
    def is_availability_zones_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Sets whether the cluster will support availability zones. This can be set as true only in regions where Azure Data Explorer support Availability Zones. This Property can not be modified after cluster creation. Default value is 'true' if region supports Availability Zones.
        """
        return pulumi.get(self, "is_availability_zones_enabled")
    @is_availability_zones_enabled.setter
    def is_availability_zones_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_availability_zones_enabled", value)
    @property
    @pulumi.getter(name="isDoubleEncryptionEnabled")
    def is_double_encryption_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Configures whether cluster will use double encryption. This Property can not be modified after cluster creation. Default value is 'true'
        """
        return pulumi.get(self, "is_double_encryption_enabled")
    @is_double_encryption_enabled.setter
    def is_double_encryption_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_double_encryption_enabled", value)
    @property
    @pulumi.getter(name="keyVaultProperties")
    def key_vault_properties(self) -> Optional[pulumi.Input['KeyVaultPropertiesArgs']]:
        """
        The associated key properties.
        """
        return pulumi.get(self, "key_vault_properties")
    @key_vault_properties.setter
    def key_vault_properties(self, value: Optional[pulumi.Input['KeyVaultPropertiesArgs']]):
        pulumi.set(self, "key_vault_properties", value)
    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        The geo-location where the resource lives
        """
        return pulumi.get(self, "location")
    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)
    @property
    @pulumi.getter
    def sku(self) -> Optional[pulumi.Input['ClusterSkuArgs']]:
        """
        The sku properties.
        """
        return pulumi.get(self, "sku")
    @sku.setter
    def sku(self, value: Optional[pulumi.Input['ClusterSkuArgs']]):
        pulumi.set(self, "sku", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        Resource tags.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
class Cluster(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
billing_type: Optional[pulumi.Input[Union[str, 'BillingType']]] = None,
cluster_name: Optional[pulumi.Input[str]] = None,
identity: Optional[pulumi.Input[pulumi.InputType['IdentityArgs']]] = None,
is_availability_zones_enabled: Optional[pulumi.Input[bool]] = None,
is_double_encryption_enabled: Optional[pulumi.Input[bool]] = None,
key_vault_properties: Optional[pulumi.Input[pulumi.InputType['KeyVaultPropertiesArgs']]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[pulumi.InputType['ClusterSkuArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
The top level Log Analytics cluster resource container.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Union[str, 'BillingType']] billing_type: The cluster's billing type.
:param pulumi.Input[str] cluster_name: The name of the Log Analytics cluster.
:param pulumi.Input[pulumi.InputType['IdentityArgs']] identity: The identity of the resource.
:param pulumi.Input[bool] is_availability_zones_enabled: Sets whether the cluster will support availability zones. This can be set as true only in regions where Azure Data Explorer support Availability Zones. This Property can not be modified after cluster creation. Default value is 'true' if region supports Availability Zones.
:param pulumi.Input[bool] is_double_encryption_enabled: Configures whether cluster will use double encryption. This Property can not be modified after cluster creation. Default value is 'true'
:param pulumi.Input[pulumi.InputType['KeyVaultPropertiesArgs']] key_vault_properties: The associated key properties.
:param pulumi.Input[str] location: The geo-location where the resource lives
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[pulumi.InputType['ClusterSkuArgs']] sku: The sku properties.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ClusterArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
The top level Log Analytics cluster resource container.
:param str resource_name: The name of the resource.
:param ClusterArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ClusterArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
billing_type: Optional[pulumi.Input[Union[str, 'BillingType']]] = None,
cluster_name: Optional[pulumi.Input[str]] = None,
identity: Optional[pulumi.Input[pulumi.InputType['IdentityArgs']]] = None,
is_availability_zones_enabled: Optional[pulumi.Input[bool]] = None,
is_double_encryption_enabled: Optional[pulumi.Input[bool]] = None,
key_vault_properties: Optional[pulumi.Input[pulumi.InputType['KeyVaultPropertiesArgs']]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[pulumi.InputType['ClusterSkuArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ClusterArgs.__new__(ClusterArgs)
__props__.__dict__["billing_type"] = billing_type
__props__.__dict__["cluster_name"] = cluster_name
__props__.__dict__["identity"] = identity
__props__.__dict__["is_availability_zones_enabled"] = is_availability_zones_enabled
__props__.__dict__["is_double_encryption_enabled"] = is_double_encryption_enabled
__props__.__dict__["key_vault_properties"] = key_vault_properties
__props__.__dict__["location"] = location
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["sku"] = sku
__props__.__dict__["tags"] = tags
__props__.__dict__["associated_workspaces"] = None
__props__.__dict__["capacity_reservation_properties"] = None
__props__.__dict__["cluster_id"] = None
__props__.__dict__["created_date"] = None
__props__.__dict__["last_modified_date"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-native:operationalinsights:Cluster"), pulumi.Alias(type_="azure-native:operationalinsights/v20190801preview:Cluster"), pulumi.Alias(type_="azure-native:operationalinsights/v20200301preview:Cluster"), pulumi.Alias(type_="azure-native:operationalinsights/v20200801:Cluster"), pulumi.Alias(type_="azure-native:operationalinsights/v20201001:Cluster")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Cluster, __self__).__init__(
'azure-native:operationalinsights/v20210601:Cluster',
resource_name,
__props__,
opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'Cluster':
        """
        Get an existing Cluster resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        # Merging the provider id into the options tells the Pulumi engine to
        # read the existing resource's state rather than create a new one.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = ClusterArgs.__new__(ClusterArgs)

        # Every property starts as None; the engine populates them from the
        # provider's response when the resource state is read.
        __props__.__dict__["associated_workspaces"] = None
        __props__.__dict__["billing_type"] = None
        __props__.__dict__["capacity_reservation_properties"] = None
        __props__.__dict__["cluster_id"] = None
        __props__.__dict__["created_date"] = None
        __props__.__dict__["identity"] = None
        __props__.__dict__["is_availability_zones_enabled"] = None
        __props__.__dict__["key_vault_properties"] = None
        __props__.__dict__["last_modified_date"] = None
        __props__.__dict__["location"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["provisioning_state"] = None
        __props__.__dict__["sku"] = None
        __props__.__dict__["tags"] = None
        __props__.__dict__["type"] = None
        return Cluster(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="associatedWorkspaces")
    def associated_workspaces(self) -> pulumi.Output[Optional[Sequence['outputs.AssociatedWorkspaceResponse']]]:
        """
        The list of Log Analytics workspaces associated with the cluster
        """
        # Read-only output resolved from the provider state.
        return pulumi.get(self, "associated_workspaces")
    @property
    @pulumi.getter(name="billingType")
    def billing_type(self) -> pulumi.Output[Optional[str]]:
        """
        The cluster's billing type.
        """
        # Optional input echoed back as an output after create/read.
        return pulumi.get(self, "billing_type")
    @property
    @pulumi.getter(name="capacityReservationProperties")
    def capacity_reservation_properties(self) -> pulumi.Output[Optional['outputs.CapacityReservationPropertiesResponse']]:
        """
        Additional properties for capacity reservation
        """
        # Read-only output resolved from the provider state.
        return pulumi.get(self, "capacity_reservation_properties")
    @property
    @pulumi.getter(name="clusterId")
    def cluster_id(self) -> pulumi.Output[str]:
        """
        The ID associated with the cluster.
        """
        # Provider-assigned identifier; available once the cluster exists.
        return pulumi.get(self, "cluster_id")
    @property
    @pulumi.getter(name="createdDate")
    def created_date(self) -> pulumi.Output[str]:
        """
        The cluster creation time
        """
        # Provider-assigned timestamp; read-only.
        return pulumi.get(self, "created_date")
    @property
    @pulumi.getter
    def identity(self) -> pulumi.Output[Optional['outputs.IdentityResponse']]:
        """
        The identity of the resource.
        """
        # Optional input echoed back as an output after create/read.
        return pulumi.get(self, "identity")
    @property
    @pulumi.getter(name="isAvailabilityZonesEnabled")
    def is_availability_zones_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Sets whether the cluster will support availability zones. This can be set as true only in regions where Azure Data Explorer support Availability Zones. This Property can not be modified after cluster creation. Default value is 'true' if region supports Availability Zones.
        """
        # Immutable after creation (see docstring); read-only output here.
        return pulumi.get(self, "is_availability_zones_enabled")
    @property
    @pulumi.getter(name="keyVaultProperties")
    def key_vault_properties(self) -> pulumi.Output[Optional['outputs.KeyVaultPropertiesResponse']]:
        """
        The associated key properties.
        """
        # Optional input echoed back as an output after create/read.
        return pulumi.get(self, "key_vault_properties")
    @property
    @pulumi.getter(name="lastModifiedDate")
    def last_modified_date(self) -> pulumi.Output[str]:
        """
        The last time the cluster was updated.
        """
        # Provider-assigned timestamp; read-only.
        return pulumi.get(self, "last_modified_date")
    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        The geo-location where the resource lives
        """
        # Required Azure region of the resource.
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the resource
        """
        # Provider-assigned; read-only.
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> pulumi.Output[str]:
        """
        The provisioning state of the cluster.
        """
        # Provider-reported lifecycle state; read-only.
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter
    def sku(self) -> pulumi.Output[Optional['outputs.ClusterSkuResponse']]:
        """
        The sku properties.
        """
        # Optional input echoed back as an output after create/read.
        return pulumi.get(self, "sku")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        Resource tags.
        """
        # Optional input echoed back as an output after create/read.
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        # Fixed ARM resource type string; read-only.
        return pulumi.get(self, "type")
|
PypiClean
|
/pyDVL-0.6.1.tar.gz/pyDVL-0.6.1/src/pydvl/utils/parallel/map_reduce.py
|
import inspect
from functools import singledispatch, update_wrapper
from itertools import accumulate, repeat
from typing import (
Any,
Callable,
Dict,
Generic,
List,
Optional,
Sequence,
TypeVar,
Union,
)
import numpy as np
import ray
from numpy.typing import NDArray
from ray import ObjectRef
from ..config import ParallelConfig
from ..types import maybe_add_argument
from .backend import init_parallel_backend
__all__ = ["MapReduceJob"]
T = TypeVar("T")
R = TypeVar("R")
Identity = lambda x, *args, **kwargs: x
MapFunction = Callable[..., R]
ReduceFunction = Callable[[List[R]], R]
ChunkifyInputType = Union[NDArray[T], Sequence[T], T]
def _wrap_func_with_remote_args(func: Callable, *, timeout: Optional[float] = None):
    """Wrap ``func`` so that every argument is resolved before the call.

    Each positional and keyword argument is passed through :func:`_get_value`
    (which fetches remote object references, waiting up to ``timeout``
    seconds) before ``func`` is invoked with the resolved values.
    """

    def wrapper(*args, **kwargs):
        resolved_args = [_get_value(a, timeout=timeout) for a in args]
        resolved_kwargs = {
            key: _get_value(val, timeout=timeout) for key, val in kwargs.items()
        }
        return func(*resolved_args, **resolved_kwargs)

    try:
        inspect.signature(func)
        wrapper = update_wrapper(wrapper, func)
    except ValueError:
        # Doing it manually here because using update_wrapper from functools
        # on numpy functions doesn't work with ray for some unknown reason.
        wrapper.__name__ = func.__name__
        wrapper.__qualname__ = func.__qualname__
        wrapper.__doc__ = func.__doc__
    return wrapper
@singledispatch
def _get_value(v: Any, *, timeout: Optional[float] = None) -> Any:
    # Fallback: plain values are returned untouched.
    return v


@_get_value.register
def _(v: ObjectRef, *, timeout: Optional[float] = None) -> Any:
    # Ray object references are resolved, blocking up to `timeout` seconds.
    return ray.get(v, timeout=timeout)


@_get_value.register
def _(v: np.ndarray, *, timeout: Optional[float] = None) -> NDArray:
    # Arrays are concrete data, not references: pass through unchanged.
    return v


# Careful to use list as hint. The dispatch does not work with typing generics
@_get_value.register
def _(v: list, *, timeout: Optional[float] = None) -> List[Any]:
    # Resolve each element recursively (lists may mix refs and plain values).
    return [_get_value(x, timeout=timeout) for x in v]
class MapReduceJob(Generic[T, R]):
    """Takes an embarrassingly parallel fun and runs it in ``n_jobs`` parallel
    jobs, splitting the data evenly into a number of chunks equal to the number of jobs.

    Typing information for objects of this class requires the type of the inputs
    that are split for ``map_func`` and the type of its output.

    :param inputs: The input that will be split and passed to ``map_func``.
        If it is not a sequence object, it will be repeated ``n_jobs`` number
        of times instead of being split.
    :param map_func: Function that will be applied to the input chunks in each job.
    :param reduce_func: Function that will be applied to the results of
        ``map_func`` to reduce them.
    :param map_kwargs: Keyword arguments that will be passed to ``map_func`` in
        each job. Alternatively, one can use ``itertools.partial``.
    :param reduce_kwargs: Keyword arguments that will be passed to ``reduce_func``
        in each job. Alternatively, one can use :func:`itertools.partial`.
    :param config: Instance of :class:`~pydvl.utils.config.ParallelConfig`
        with cluster address, number of cpus, etc.
    :param n_jobs: Number of parallel jobs to run. Does not accept 0
    :param timeout: Amount of time in seconds to wait for remote results before
        the backend gives up fetching them (``None`` waits indefinitely).
    :param max_parallel_tasks: Maximum number of jobs to start in parallel. Any
        tasks above this number won't be submitted to the backend before some
        are done. This is to avoid swamping the work queue. Note that tasks have
        a low memory footprint, so this is probably not a big concern, except
        in the case of an infinite stream (not the case for MapReduceJob). See
        https://docs.ray.io/en/latest/ray-core/patterns/limit-pending-tasks.html

    :Examples:

    A simple usage example with 2 jobs:

    >>> from pydvl.utils.parallel import MapReduceJob
    >>> import numpy as np
    >>> map_reduce_job: MapReduceJob[np.ndarray, np.ndarray] = MapReduceJob(
    ...     np.arange(5),
    ...     map_func=np.sum,
    ...     reduce_func=np.sum,
    ...     n_jobs=2,
    ... )
    >>> map_reduce_job()
    10

    When passed a single object as input, it will be repeated for each job:

    >>> from pydvl.utils.parallel import MapReduceJob
    >>> import numpy as np
    >>> map_reduce_job: MapReduceJob[int, np.ndarray] = MapReduceJob(
    ...     5,
    ...     map_func=lambda x: np.array([x]),
    ...     reduce_func=np.sum,
    ...     n_jobs=4,
    ... )
    >>> map_reduce_job()
    20
    """

    def __init__(
        self,
        inputs: Union[Sequence[T], T],
        map_func: MapFunction[R],
        reduce_func: Optional[ReduceFunction[R]] = None,
        map_kwargs: Optional[Dict] = None,
        reduce_kwargs: Optional[Dict] = None,
        config: ParallelConfig = ParallelConfig(),
        *,
        n_jobs: int = -1,
        timeout: Optional[float] = None,
        max_parallel_tasks: Optional[int] = None,
    ):
        self.config = config
        parallel_backend = init_parallel_backend(self.config)
        self.parallel_backend = parallel_backend
        self.timeout = timeout
        self._n_jobs = 1
        # This uses the setter defined below
        self.n_jobs = n_jobs
        self.max_parallel_tasks = max_parallel_tasks
        self.inputs_ = inputs
        if reduce_func is None:
            reduce_func = Identity
        # kwargs are put into the backend's object store once, up front, so
        # they are not re-serialized for every job.
        if map_kwargs is None:
            self.map_kwargs = dict()
        else:
            self.map_kwargs = {
                k: self.parallel_backend.put(v) for k, v in map_kwargs.items()
            }
        if reduce_kwargs is None:
            self.reduce_kwargs = dict()
        else:
            self.reduce_kwargs = {
                k: self.parallel_backend.put(v) for k, v in reduce_kwargs.items()
            }
        # Ensure map_func accepts a `job_id` keyword (added if missing).
        self._map_func = maybe_add_argument(map_func, "job_id")
        self._reduce_func = reduce_func

    def __call__(
        self,
    ) -> R:
        # Full pipeline: chunk + dispatch the maps, then reduce the results.
        map_results = self.map(self.inputs_)
        reduce_results = self.reduce(map_results)
        return reduce_results

    def map(self, inputs: Union[Sequence[T], T]) -> List["ObjectRef[R]"]:
        """Splits the input data into chunks and calls a wrapped :func:`map_func` on them."""
        map_results: List["ObjectRef[R]"] = []
        map_func = self._wrap_function(self._map_func)
        total_n_jobs = 0
        total_n_finished = 0
        chunks = self._chunkify(inputs, n_chunks=self.n_jobs)
        for j, next_chunk in enumerate(chunks):
            # Each chunk is submitted asynchronously; results are object refs.
            result = map_func(next_chunk, job_id=j, **self.map_kwargs)
            map_results.append(result)
            total_n_jobs += 1
            # Throttle submission so at most max_parallel_tasks are in flight.
            total_n_finished = self._backpressure(
                map_results,
                n_dispatched=total_n_jobs,
                n_finished=total_n_finished,
            )
        return map_results

    def reduce(self, chunks: List["ObjectRef[R]"]) -> R:
        """Reduces the resulting chunks from a call to :meth:`~pydvl.utils.parallel.map_reduce.MapReduceJob.map`
        by passing them to a wrapped :func:`reduce_func`."""
        reduce_func = self._wrap_function(self._reduce_func)
        reduce_result = reduce_func(chunks, **self.reduce_kwargs)
        # Block until the reduction is done (or the timeout expires).
        result = self.parallel_backend.get(reduce_result, timeout=self.timeout)
        return result  # type: ignore

    def _wrap_function(self, func: Callable, **kwargs) -> Callable:
        """Wraps a function with a timeout and remote arguments and puts it on
        the remote backend.

        :param func: Function to wrap
        :param kwargs: Additional keyword arguments to pass to the backend
            wrapper. These are *not* arguments for the wrapped function.
        :return: Remote function that can be called with the same arguments as
            the wrapped function. Depending on the backend, this may simply be
            the function itself.
        """
        return self.parallel_backend.wrap(
            _wrap_func_with_remote_args(func, timeout=self.timeout), **kwargs
        )

    def _backpressure(
        self, jobs: List[ObjectRef], n_dispatched: int, n_finished: int
    ) -> int:
        """This is used to limit the number of concurrent tasks.

        If :attr:`~pydvl.utils.parallel.map_reduce.MapReduceJob.max_parallel_tasks` is None then this function
        is a no-op that simply returns 0.

        See https://docs.ray.io/en/latest/ray-core/patterns/limit-pending-tasks.html

        :param jobs: object references of all dispatched jobs so far
        :param n_dispatched: total number of jobs dispatched
        :param n_finished: number of jobs known to have finished
        :return: updated count of finished jobs
        """
        if self.max_parallel_tasks is None:
            return 0
        # Block until enough jobs finish that the in-flight count drops to
        # the configured maximum.
        while (n_in_flight := n_dispatched - n_finished) > self.max_parallel_tasks:
            wait_for_num_jobs = n_in_flight - self.max_parallel_tasks
            finished_jobs, _ = self.parallel_backend.wait(
                jobs,
                num_returns=wait_for_num_jobs,
                timeout=10,  # FIXME make parameter?
            )
            n_finished += len(finished_jobs)
        return n_finished

    def _chunkify(self, data: ChunkifyInputType, n_chunks: int) -> List["ObjectRef[T]"]:
        """If data is a Sequence, it splits it into Sequences of size `n_chunks` for each job that we call chunks.
        If instead data is an `ObjectRef` instance, then it yields it repeatedly `n_chunks` number of times.
        """
        if n_chunks <= 0:
            raise ValueError("Number of chunks should be greater than 0")
        if n_chunks == 1:
            data_id = self.parallel_backend.put(data)
            return [data_id]
        try:
            # This is used as a check to determine whether data is iterable or not
            # if it's the former, then the value will be used to determine the chunk indices.
            n = len(data)
        except TypeError:
            # Non-sized input: store once and hand the same reference to
            # every job.
            data_id = self.parallel_backend.put(data)
            return list(repeat(data_id, times=n_chunks))
        else:
            # This is very much inspired by numpy's array_split function
            # The difference is that it only uses built-in functions
            # and does not convert the input data to an array
            chunk_size, remainder = divmod(n, n_chunks)
            # The first `remainder` chunks get one extra element each.
            chunk_indices = tuple(
                accumulate(
                    [0]
                    + remainder * [chunk_size + 1]
                    + (n_chunks - remainder) * [chunk_size]
                )
            )
            chunks = []
            for start_index, end_index in zip(chunk_indices[:-1], chunk_indices[1:]):
                if start_index >= end_index:
                    # Empty chunk: fewer items than jobs, stop early.
                    break
                chunk_id = self.parallel_backend.put(data[start_index:end_index])
                chunks.append(chunk_id)
            return chunks

    @property
    def n_jobs(self) -> int:
        """Effective number of jobs according to the used ParallelBackend instance."""
        return self._n_jobs

    @n_jobs.setter
    def n_jobs(self, value: int):
        # Let the backend translate -1 / oversized requests into a concrete
        # number of workers.
        self._n_jobs = self.parallel_backend.effective_n_jobs(value)
|
PypiClean
|
/symmetric_secret_share-0.0.8-py3-none-any.whl/sss_cli/__main__.py
|
from nacl.encoding import Base64Encoder
from nacl.utils import random, randombytes_deterministic
import typer
from sss_cli import __version__
from sss_cli._string_template import EXAMPLE_KEYCHAIN
from sss_cli.helper import get_keychain
from sss_cli.inject import inject
from sss_cli.share import share
app = typer.Typer()
@app.command("key")
def set_key(
    clear: bool = typer.Option(
        False, "-c", "--clear", help="Clear all keys in keychain"
    ),
    force: bool = typer.Option(
        False, "-f", "--force", help="Force clear all keys in keychain"
    ),
    generate: bool = typer.Option(
        False, "-g", "--generate", help="Generate a new 32 bytes key"
    ),
):
    """Edit keys in keychain.

    With ``--generate``, print a fresh random key and exit without touching
    the keychain. With ``--clear``, delete the keychain file (confirming
    unless ``--force``). With no flags, create the keychain from a template
    if needed and open it for editing.
    """
    keychain = get_keychain()
    if generate:
        # 24 random bytes rendered as base64 -> a 32-character key string.
        key = randombytes_deterministic(24, random(), Base64Encoder).decode("utf-8")
        typer.secho("Generated new key:", fg="green")
        typer.secho(f"{key}", fg="bright_black", bg="white")
        raise typer.Exit(code=0)
    if clear:
        if not force:
            typer.confirm("Are you sure you want to delete it?", abort=True)
        if keychain.is_file():
            keychain.unlink()
        typer.secho("Cleared keychain.", fg="green")
        raise typer.Exit(code=0)
    if not keychain.is_file():
        # First run: seed the keychain with the example template.
        keychain.write_text(EXAMPLE_KEYCHAIN)
    typer.secho("Please edit keychain config file.", fg="green")
    typer.launch(str(keychain))
@app.command("share")
def cmd_share(
    config_path: str = typer.Argument(..., help="Path to your repo"),
    key: str = typer.Option("", "-k", "--key", help="Password as plaintext"),
):
    """Update the cypher file by encrypting the secret file."""
    # Thin CLI wrapper: the encryption logic lives in sss_cli.share.share.
    # An empty key means the key is resolved elsewhere (presumably the
    # keychain) -- confirm against share()'s implementation.
    share(config_path, key)
@app.command(name="inject")
def cmd_inject(
    config_path: str = typer.Argument(..., help="Path to your repo"),
    key: str = typer.Option("", "-k", "--key", help="Password as plaintext"),
):
    """Inject the decrypted cypher to correct path in repo."""
    # Thin CLI wrapper: the decryption logic lives in sss_cli.inject.inject.
    inject(config_path, key)
|
PypiClean
|
/bigdl_chronos_spark321-2.1.0b202207291-py3-none-manylinux1_x86_64.whl/bigdl/chronos/model/autoformer/Autoformer.py
|
# MIT License
# Copyright (c) 2021 THUML @ Tsinghua University
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# code adapted from https://github.com/thuml/Autoformer
import torch
import torch.nn as nn
from .layers.Embed import DataEmbedding_wo_pos
from .layers.AutoCorrelation import AutoCorrelation, AutoCorrelationLayer
from .layers.Autoformer_EncDec import Encoder, Decoder, EncoderLayer,\
DecoderLayer, my_Layernorm, series_decomp
import torch.optim as optim
import pytorch_lightning as pl
from collections import namedtuple
from ..utils import PYTORCH_REGRESSION_LOSS_MAP
class AutoFormer(pl.LightningModule):
    """
    Autoformer is the first method to achieve the series-wise connection,
    with inherent O(LlogL) complexity.

    ``configs`` is an attribute-access object (see
    ``_transform_config_to_namedtuple``) carrying the model hyperparameters
    (``seq_len``, ``label_len``, ``pred_len``, encoder/decoder sizes) and
    training options (``loss``, ``optim``, ``lr``,
    ``lr_scheduler_milestones``).
    """
    def __init__(self, configs):
        super().__init__()
        # Seed all workers for reproducible runs; seed may be None.
        pl.seed_everything(configs.seed, workers=True)
        self.seq_len = configs.seq_len
        self.label_len = configs.label_len
        self.pred_len = configs.pred_len
        self.output_attention = configs.output_attention
        self.optim = configs.optim
        self.lr = configs.lr
        self.lr_scheduler_milestones = configs.lr_scheduler_milestones
        self.loss = loss_creator(configs.loss)
        # Decomp: moving-average based trend/seasonal decomposition.
        kernel_size = configs.moving_avg
        self.decomp = series_decomp(kernel_size)
        # Embedding
        # The series-wise connection inherently contains the sequential information.
        # Thus, we can discard the position embedding of transformers.
        self.enc_embedding = DataEmbedding_wo_pos(configs.enc_in, configs.d_model, configs.embed,
                                                  configs.freq, configs.dropout)
        self.dec_embedding = DataEmbedding_wo_pos(configs.dec_in, configs.d_model, configs.embed,
                                                  configs.freq, configs.dropout)
        # Encoder: e_layers stacked auto-correlation encoder layers.
        self.encoder = Encoder(
            [
                EncoderLayer(
                    AutoCorrelationLayer(
                        AutoCorrelation(False, configs.factor, attention_dropout=configs.dropout,
                                        output_attention=configs.output_attention),
                        configs.d_model, configs.n_heads),
                    configs.d_model,
                    configs.d_ff,
                    moving_avg=configs.moving_avg,
                    dropout=configs.dropout,
                    activation=configs.activation
                ) for l in range(configs.e_layers)
            ],
            norm_layer=my_Layernorm(configs.d_model)
        )
        # Decoder: d_layers with self- and cross-auto-correlation, followed
        # by a linear projection to c_out channels.
        self.decoder = Decoder(
            [
                DecoderLayer(
                    AutoCorrelationLayer(
                        AutoCorrelation(True, configs.factor, attention_dropout=configs.dropout,
                                        output_attention=False),
                        configs.d_model, configs.n_heads),
                    AutoCorrelationLayer(
                        AutoCorrelation(False, configs.factor, attention_dropout=configs.dropout,
                                        output_attention=False),
                        configs.d_model, configs.n_heads),
                    configs.d_model,
                    configs.c_out,
                    configs.d_ff,
                    moving_avg=configs.moving_avg,
                    dropout=configs.dropout,
                    activation=configs.activation,
                )
                for l in range(configs.d_layers)
            ],
            norm_layer=my_Layernorm(configs.d_model),
            projection=nn.Linear(configs.d_model, configs.c_out, bias=True)
        )

    def forward(self, x_enc, x_mark_enc, x_dec, x_mark_dec,
                enc_self_mask=None, dec_self_mask=None, dec_enc_mask=None):
        """Return predictions of shape [batch, pred_len, c_out] (plus
        attention weights when ``output_attention`` is set)."""
        # decomp init: seed the decoder trend with the encoder mean, and the
        # seasonal part with zeros, over the prediction horizon.
        mean = torch.mean(x_enc, dim=1).unsqueeze(1).repeat(1, self.pred_len, 1)
        zeros = torch.zeros([x_dec.shape[0], self.pred_len, x_dec.shape[2]], device=x_enc.device)
        seasonal_init, trend_init = self.decomp(x_enc)
        # decoder input: last label_len steps of each component + init above
        trend_init = torch.cat([trend_init[:, -self.label_len:, :], mean], dim=1)
        seasonal_init = torch.cat([seasonal_init[:, -self.label_len:, :], zeros], dim=1)
        # enc
        enc_out = self.enc_embedding(x_enc, x_mark_enc)
        enc_out, attns = self.encoder(enc_out, attn_mask=enc_self_mask)
        # dec
        dec_out = self.dec_embedding(seasonal_init, x_mark_dec)
        seasonal_part, trend_part = self.decoder(dec_out, enc_out, x_mask=dec_self_mask,
                                                 cross_mask=dec_enc_mask, trend=trend_init)
        # final: recombine trend and seasonal components.
        dec_out = trend_part + seasonal_part
        if self.output_attention:
            return dec_out[:, -self.pred_len:, :], attns
        else:
            return dec_out[:, -self.pred_len:, :]  # [B, L, D]

    def training_step(self, batch, batch_idx):
        # Batch layout: (x, y, x time marks, y time marks).
        batch_x, batch_y, batch_x_mark, batch_y_mark = map(lambda x: x.float(), batch)
        outputs = self(batch_x, batch_x_mark, batch_y, batch_y_mark)
        # Compare only the forecast horizon (last pred_len steps).
        outputs = outputs[:, -self.pred_len:, :]
        batch_y = batch_y[:, -self.pred_len:, :]
        return self.loss(outputs, batch_y)

    def validation_step(self, batch, batch_idx):
        batch_x, batch_y, batch_x_mark, batch_y_mark = map(lambda x: x.float(), batch)
        outputs = self(batch_x, batch_x_mark, batch_y, batch_y_mark)
        outputs = outputs[:, -self.pred_len:, :]
        batch_y = batch_y[:, -self.pred_len:, :]
        self.log("val_loss", self.loss(outputs, batch_y))

    def predict_step(self, batch, batch_idx):
        batch_x, batch_y, batch_x_mark, batch_y_mark = map(lambda x: x.float(), batch)
        outputs = self(batch_x, batch_x_mark, batch_y, batch_y_mark)
        outputs = outputs[:, -self.pred_len:, :]
        return outputs

    def configure_optimizers(self):
        # Optimizer class is looked up by name on torch.optim (e.g. "Adam").
        optimizer = getattr(optim, self.optim)(self.parameters(), lr=self.lr)
        if self.lr_scheduler_milestones is not None:
            # Halve the learning rate at each configured milestone epoch.
            scheduler = torch.optim.lr_scheduler.MultiStepLR(
                optimizer, gamma=0.5, verbose=True,
                milestones=self.lr_scheduler_milestones)
            return [optimizer], [scheduler]
        else:
            return optimizer
def model_creator(config):
    """Build an :class:`AutoFormer` model from a plain config dict."""
    return AutoFormer(_transform_config_to_namedtuple(config))
def loss_creator(loss_name):
    """Instantiate a torch loss module from a short loss name.

    ``loss_name`` must be one of the keys of ``PYTORCH_REGRESSION_LOSS_MAP``
    ('mse', 'mae' or 'huber_loss'); otherwise an invalid-input error is
    reported.
    """
    try:
        resolved_name = PYTORCH_REGRESSION_LOSS_MAP[loss_name]
    except KeyError:
        from bigdl.nano.utils.log4Error import invalidInputError
        invalidInputError(False,
                          f"Got '{loss_name}' for loss name, "
                          "where 'mse', 'mae' or 'huber_loss' is expected")
        resolved_name = loss_name
    return getattr(torch.nn, resolved_name)()
def _transform_config_to_namedtuple(config):
args = namedtuple("config", ['seq_len', 'label_len',
'pred_len', 'output_attention',
'moving_avg', 'enc_in',
'd_model', 'embed',
'freq', 'dropout',
'dec_in', 'factor',
'n_heads', 'd_ff',
'activation', 'e_layers',
'c_out', 'loss',
'optim', 'lr',
'lr_scheduler_milestones'])
args.seq_len = config['seq_len']
args.label_len = config['label_len']
args.pred_len = config['pred_len']
args.output_attention = config.get('output_attention', False)
args.moving_avg = config.get('moving_avg', 25)
args.enc_in = config['enc_in']
args.d_model = config.get('d_model', 512)
args.embed = config.get('embed', 'timeF')
args.freq = config['freq']
args.dropout = config.get('dropout', 0.05)
args.dec_in = config['dec_in']
args.factor = config.get('factor', 3)
args.n_heads = config.get('n_heads', 8)
args.d_ff = config.get('d_ff', 2048)
args.activation = config.get('activation', 'gelu')
args.e_layers = config.get('e_layers', 2)
args.c_out = config['c_out']
args.d_layers = config.get('d_layers', 1)
args.loss = config.get("loss", "mse")
args.optim = config.get("optim", "Adam")
args.lr = config.get("lr", 0.0001)
args.lr_scheduler_milestones = config.get("lr_scheduler_milestones", None)
args.seed = config.get("seed", None)
return args
|
PypiClean
|
/ua_box_api-1.0.0-py3-none-any.whl/ua_box_api/ua_box_api.py
|
from io import BytesIO
import boxsdk
__author__ = "Stephen Stern"
__maintainer__ = "Stephen Stern"
__email__ = "[email protected]"
class BoxApi:
    """Convenience wrapper around a JWT-authenticated Box SDK client."""

    def __init__(self, stache_secret):
        """Authenticate the API client using the JWT authentication method.

        :param stache_secret: dict with Box app settings, as stored in
            Stache: ``boxAppSettings`` (client id/secret and ``appAuth``
            key material) and ``enterpriseID``.
        """
        private_key_stream = BytesIO(
            stache_secret["boxAppSettings"]["appAuth"]["privateKey"].encode())
        jwt_options = {
            "client_id": stache_secret["boxAppSettings"]["clientID"],
            "client_secret": stache_secret["boxAppSettings"]["clientSecret"],
            "enterprise_id": stache_secret["enterpriseID"],
            "jwt_key_id": stache_secret[
                "boxAppSettings"]["appAuth"]["publicKeyID"],
            "rsa_private_key_passphrase": stache_secret[
                "boxAppSettings"]["appAuth"]["passphrase"].encode(),
            "rsa_private_key_data": private_key_stream
        }
        auth = boxsdk.JWTAuth(**jwt_options)
        auth.authenticate_instance()
        self.client = boxsdk.Client(auth)

    def get_all_items(self, item_id):
        """Return a list of all items in the folder with the given item_id.

        :param item_id: Box folder id (int or str).
        :raises TypeError: if ``item_id`` is not an int or str -- passing a
            folder object here used to hang in an infinite loop.
        """
        if not isinstance(item_id, (int, str)):
            raise TypeError("item_id must be an int or str.")
        folder = self.client.folder(folder_id=item_id)
        items = []
        offset = 0
        has_next_item = True
        # Every 300000 items, get a new generator; otherwise, add the current
        # generator's next(). If the current generator doesn't have a next(),
        # make while condition False.
        while has_next_item:
            if len(items) == offset:
                items_generator = folder.get_items(
                    limit=offset + 300000, offset=offset)
                offset += 300000
            try:
                items.append(items_generator.next())
            except StopIteration:
                has_next_item = False
        return items

    def find_child_by_name(self, name, item_id):
        """Return the child object with exactly this name, or None if absent."""
        matches = [x for x in self.get_all_items(item_id) if x.name == name]
        if matches:
            return matches[0]
        return None

    def get_duplicate_file_name(self, folder_id, current_name, zip_file=False):
        """If the given name is in the folder, return a modified file name.

        The suffix is ``(N)`` where N counts existing items whose names
        contain the search name (substring match, so earlier ``name(1)``
        copies are counted too). For zip files the ``.zip`` extension is
        re-appended after the suffix.
        """
        search_name = current_name
        if zip_file:
            search_name = current_name.replace(".zip", "")
        folder_items = self.get_all_items(folder_id)
        duplicates = [
            item.name for item in folder_items if search_name in item.name]
        if duplicates:
            if zip_file:
                return f"{search_name}({len(duplicates)}).zip"
            return f"{current_name}({len(duplicates)})"
        return current_name
|
PypiClean
|
/gino-factory-0.1.0.tar.gz/gino-factory-0.1.0/README.rst
|
Gino Factory
===========================
| **Install**: ``pip install gino-factory``
**Github**: https://github.com/Basalex/gino_factory
Usage:
~~~~~~~~~~~~~~~~~~
Let's create two gino models
.. code:: python
class Team(db.Model):
__tablename__ = 'teams'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255))
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
parent_id = db.Column(sa.Integer, sa.ForeignKey(f'users.id', name='parent_id_fkey'))
username = db.Column(db.String(255), nullable=False, unique=True)
custom_field = db.Column(db.String(255), nullable=False, unique=True)
custom_factory = db.Column(db.String(255), nullable=False, unique=True)
Next, you need to register these models
.. code:: python
from gino_factory import GinoFactory
class Factory(GinoFactory):
pass
Factory.register(Team)
Factory.register(User, custom_field='value', custom_factory=function)
Single objects
.. code:: python
async def test():
user = await Factory.user()
assert user.team_id is not None
team = await Factory.team()
user = await Factory.user(team_id=team)
assert team.id == user.team_id
Iterator objects
.. code:: python
from itertools import cycle
async def test():
teams = await Factory.cycle(10).team()
users = await Factory.cycle(10).user(team_id=iter(teams))
assert users[-1].team_id == teams[-1].id
teams = await Factory.cycle(3).team() # id: 1,2,3
users = await Factory.cycle(10).user(team_id=cycle(teams))
assert [u.team_id for u in users] == [1, 2, 3, 1, 2, 3, 1, 2, 3, 1]
Trees
* **Tree attributes:**
* amount: int = 50 - maximum amount of total members
* max_depth: int = 5 - maximum tree depth
* top_parents_amount: int = 3 - Amount of "top" parents (parent_id=None)
* top_parents: CRUDModel = None - Pre-generated top-parents
* parent_field_name: str = 'parent_id' - Name of parent field
Following code generates single head tree with max depth of 3
.. code:: python
async def test():
users = await Factory.tree(amount=100, max_depth=3, top_parents_amount=1).user()
or you may fill your database with random data using __random__ method
.. code:: python
await Factory.__random__()
This method inserts random data to all registered tables
|
PypiClean
|
/rcsb.utils.io-1.45.tar.gz/rcsb.utils.io-1.45/rcsb/utils/io/ExecUtils.py
|
import logging
import os
import subprocess
import sys
logger = logging.getLogger(__name__)
class ExecUtils(object):
    """Wrapper for subprocess execution."""

    def __init__(self):
        """Wrapper for subprocess execution"""

    def run(self, execPath, execArgList=None, outPath=None, outAppend=False, timeOut=None, inpPath=None, suppressStderr=False):
        """Execute the input program as a blocking subprocess with optional timeout.

        Args:
            execPath (str): path to executable program or script
            execArgList (list, optional): argument list. Defaults to None.
            outPath (str, optional): redirect stdout and stderr to this file handle. Defaults to None.
            inpPath (str, optional): redirect stdin to this file handle (only honored together with outPath). Defaults to None.
            outAppend (bool, optional): append output. Defaults to False.
            timeOut (float, optional): timeout (seconds). Defaults to None.
            suppressStderr (bool, optional): suppress stderr output (default: combined with stdout)

        Returns:
            bool: True for success or False otherwise
        """
        retCode = False
        subProcResult = None
        kwD = {}
        # Child stderr goes either to /dev/null or into its stdout stream.
        myStderr = subprocess.DEVNULL if suppressStderr else subprocess.STDOUT
        try:
            # Bail out early unless execPath is an existing executable file.
            # NOTE: the previous guard `not os.path.isfile(p) and os.access(p, os.X_OK)`
            # could never be true (precedence bug) -- the intended check is
            # `not (isfile and executable)`.
            if not (os.path.isfile(execPath) and os.access(execPath, os.X_OK)):
                logger.error("%r is not an executable file", execPath)
                return retCode
            if timeOut:
                kwD = {"timeout": timeOut}
            cmdL = [execPath]
            if execArgList:
                cmdL.extend(execArgList)
            if outPath and inpPath:
                myMode = "a" if outAppend else "w"
                with open(outPath, myMode) as ofh, open(inpPath, "r") as ifh:
                    subProcResult = subprocess.call(cmdL, stdout=ofh, stdin=ifh, stderr=myStderr, **kwD)
            elif outPath:
                myMode = "a" if outAppend else "w"
                with open(outPath, myMode) as ofh:
                    subProcResult = subprocess.call(cmdL, stdout=ofh, stderr=myStderr, **kwD)
            else:
                subProcResult = subprocess.call(cmdL, **kwD)
            # subprocess.call returns the child's exit status (0 == success).
            retCode = not subProcResult
        except Exception as e:
            logger.exception("Failing execution of %s (%s) with %s", execPath, execArgList, str(e))
        #
        if subProcResult != 0:
            logger.error("return code is %r", subProcResult)
        return retCode

    def runShell(self, script, outPath=None, outAppend=False, timeOut=None, inpPath=None, suppressStderr=False):
        """Execute the input script in a shell as a blocking subprocess with optional timeout.

        Args:
            script (str): script
            outPath (str, optional): redirect stdout and stderr to this file handle. Defaults to None.
            inpPath (str, optional): redirect stdin to this file handle (only honored together with outPath). Defaults to None.
            outAppend (bool, optional): append output. Defaults to False.
            timeOut (float, optional): timeout (seconds). Defaults to None.
            suppressStderr (bool, optional): suppress stderr output (default: combined with stdout)

        Returns:
            bool: True for success or False otherwise
        """
        # Initialize as bool (was 0) for a consistent bool return type.
        retCode = False
        kwD = {}
        subProcResult = None
        myStderr = subprocess.DEVNULL if suppressStderr else subprocess.STDOUT
        try:
            if timeOut:
                kwD = {"timeout": timeOut}
            #
            if outPath and inpPath:
                myMode = "a" if outAppend else "w"
                with open(outPath, myMode) as ofh, open(inpPath, "r") as ifh:
                    subProcResult = subprocess.run(script, stdout=ofh, stdin=ifh, stderr=myStderr, shell=True, check=False, **kwD)
            elif outPath:
                myMode = "a" if outAppend else "w"
                with open(outPath, myMode) as ofh:
                    subProcResult = subprocess.run(script, stdout=ofh, stderr=myStderr, shell=True, check=False, **kwD)
            else:
                subProcResult = subprocess.run(script, shell=True, check=False, **kwD)
            retCode = not subProcResult.returncode
        except Exception as e:
            logger.exception("Failing execution of %r with %s", subProcResult, str(e))
        #
        if subProcResult and subProcResult.returncode != 0:
            logger.warning("return code is %r", subProcResult.returncode)
        return retCode
|
PypiClean
|
/django-epic-0.6.1.tar.gz/django-epic-0.6.1/epic/kicad/bom.py
|
from types import SimpleNamespace
import logging
import re
import xml.etree.ElementTree as ET
from pathlib import Path
from egauge import webapi
from epic.base import Enum, format_part_number, strchoice
from . import csv
from .error import Error, UnknownFlavorsError
from .parts_cache import PartsCache
# Schematic-field-name patterns.  Each accepts an optional "-FLAVOR" suffix
# (e.g. "EP-lite"); when present, group(2) captures the flavor name.
EP_PAT = re.compile(r"EP(-(.*))?")
INST_PAT = re.compile(r"Installation(-(.*))?")
VALUE_PAT = re.compile(r"Value(-(.*))?")
# Module-level logger for this module.
log = logging.getLogger(__name__)
def _bom_append(bom, component):
if component.part_id not in bom:
bom[component.part_id] = []
bom[component.part_id].append(component)
def _update(res, field, desired_flavors, detected_flavors):
"""Process object RES by matching FIELD.attrib['name'] against
RES.pattern. If there is a match M, M.group(2) must evaluate to
the name of the flavor of the field. If this flavor matches the
specified flavor, we have an exact match and RES.value is set to
field.text, RES.name is set to field.attrib['name']. If the
flavor of the field is empty and RES.value is None we have a
default match and RES is updated like for an exact match.
DETECTED_FLAVOR is a set of flavors found.
"""
m = res.pattern.match(field.attrib["name"])
if m is None:
return
this_flavor = None
if m.lastindex is not None:
this_flavor = m.group(2)
detected_flavors |= {this_flavor} # update set of all detected flavors
if this_flavor in desired_flavors or (
this_flavor is None and res.value is None
):
res.name = field.attrib["name"]
res.value = field.text
class Component:
    """A single schematic component bound to an EPIC part number.

    Wraps the reference designator, schematic value and footprint of a
    component and, after load_from_epic(), the matching EPIC part data.
    """

    # dictionary of components that first referenced a particular best part:
    first_reference = {}

    def __init__(self, part_id, refdes, value, footprint):
        """Record the schematic data and prefetch the EPIC part.

        part_id: numeric EPIC part number from the "EP" field.
        refdes: reference designator (e.g. "R12").
        value: schematic value of the component.
        footprint: schematic footprint (may be empty).
        """
        self.part_id = part_id
        self.part = None  # filled in by load_from_epic()
        self.refdes = refdes
        self.value = value
        self.footprint = footprint
        self.mfg = None  # filled in by load_from_epic()
        self.mfg_pn = None  # filled in by load_from_epic()
        # Queue the part for a batched fetch from the EPIC API.
        PartsCache.prefetch(part_id)

    def load_from_epic(self):
        """Fetch the EPIC part and cross-check it against the schematic.

        Logs warnings when the schematic value or footprint disagrees
        with the EPIC part data, and when two components use different
        but equivalent ("best") parts.  Populates self.part, self.mfg
        and self.mfg_pn.
        """
        self.part = PartsCache.get(self.part_id)
        # Warn if the schematic value disagrees with the EPIC part value.
        if self.value != self.part.val:
            log.warning(
                '%s has value "%s" but part %s has value "%s"',
                self.refdes,
                self.value,
                self,
                self.part.val,
            )
        # Warn if the schematic footprint disagrees with EPIC's.
        if self.footprint != self.part.footprint:
            if self.footprint:
                msg = "changed from %s" % self.footprint
            else:
                msg = "set"
            if self.part.footprint:
                log.warning(
                    "%s footprint should be %s to %s."
                    % (self.refdes, msg, self.part.footprint)
                )
            else:
                log.warning(
                    "%s footprint %s should be removed."
                    % (self.refdes, self.footprint)
                )
        # Remember the first component that referenced each "best part"
        # so later components using a different-but-equivalent part can
        # be flagged.
        best_part = self.part.best_part
        first_ref = self.__class__.first_reference.get(best_part)
        if first_ref is None:
            self.__class__.first_reference[best_part] = self
        else:
            if self.part_id != first_ref.part_id:
                log.warning(
                    "%s uses %s instead of equivalent %s used by %s",
                    self.refdes,
                    self,
                    first_ref,
                    first_ref.refdes,
                )
        self.mfg = self.part.mfg
        self.mfg_pn = self.part.mfg_pn

    def __str__(self):
        # A component prints as its formatted EPIC part number.
        return format_part_number(self.part_id)
class BOM:
    """A BOM object is a bill-of-material for a given PCB."""

    def __init__(
        self,
        epic_api,
        xml_filename,
        manufacturer,
        flavors=None,
        toolname="kicad-to-epic-bom",
    ):
        """Create a BOM object that represents the KiCad intermediate XML
        netlist stored in file XML_FILENAME.

        EPIC_API must be an instance of EPICAPIClient which provides
        access to the EPIC JSON API.

        MANUFACTURER is the name to use as the creator of the board.
        This is typically a short version of the company that designed
        the board.  For example, "eGauge" might be used by "eGauge
        Systems LLC".

        A single Eeschema schematic may define multiple flavors
        (versions) of a PCB.  Each flavor results in its own BOM.  The
        flavors to be used is selected with argument FLAVORS.  If this
        argument is not set, the default ("unflavored") BOM is
        generated.

        The part number and the installation of that part is
        determined based on various fields of the schematic component.
        Specifically, the following fields are used:

        Field Name:   Purpose:
        ------------  ---------------------------------------------
        EP[-FLAVOR]   Specifies the EPIC part number to use for the
                      component.  If -FLAVOR is specified, the field
                      specifies the part number to use only for that
                      FLAVOR.

        Installation[-FLAVOR]
                      If the value of this field is "DNP" (Do Not
                      Place), then the component is omitted from the
                      BOM.  If it is "P" (Place), then the component
                      is included in the BOM.  In the CSV output file,
                      do-not-place components are listed separately at
                      the end of the file.  If -FLAVOR is specified,
                      the field specifies the installation-type of the
                      component only for that FLAVOR.

        Flavored field-names take precedence over unflavored fields.
        For example, if a component specified the fields:

            EP      123
            EP-lite 42

        then part number 42 would be used for flavor "lite" but 123
        would be used in all other cases.

        Raises Error if XML_FILENAME is not valid XML and
        UnknownFlavorsError if FLAVORS names a flavor the schematic
        does not define.
        """
        self.manufacturer = manufacturer
        self.toolname = toolname
        self.schematic_name = "unknown"
        self.revision = ""
        self.sources = []
        self.comps = {}  # dictionary of components in the BOM
        self.dnps = {}  # dictionary of do-not-place components
        self.epic_api = epic_api
        PartsCache.set_epic(epic_api)
        if flavors is None:
            flavors = []
        self.flavors = flavors
        try:
            xml = ET.parse(xml_filename)
        except ET.ParseError as exc:
            # chain the parse error so the root cause stays visible
            raise Error(
                "Input file is not a valid XML file.", xml_filename
            ) from exc
        design = xml.find("design")
        if design is not None:
            source = design.findtext("source")
            if source is not None:
                path = Path(source)
                self.schematic_name = path.with_suffix("").name
            # Pick up the revision from the first sheet that has one and
            # collect the source of every sheet.
            for sheet in design.iter("sheet"):
                title_block = sheet.find("title_block")
                if title_block is not None:
                    if self.revision == "":
                        rev = title_block.findtext("rev")
                        if rev:
                            self.revision = "-rev" + rev.strip()
                    self.sources.append(title_block.findtext("source"))
        detected_flavors = set()
        for comp in xml.find("components"):
            refdes = comp.attrib.get("ref")
            footprint = comp.findtext("footprint") or ""
            part = SimpleNamespace(name=None, value=None, pattern=EP_PAT)
            inst = SimpleNamespace(name=None, value=None, pattern=INST_PAT)
            value = SimpleNamespace(name=None, value=None, pattern=VALUE_PAT)
            fields = comp.find("fields")
            if fields is not None:
                for field in fields:
                    _update(part, field, flavors, detected_flavors)
                    _update(inst, field, flavors, detected_flavors)
                    _update(value, field, flavors, detected_flavors)
            do_not_place = inst.value == "DNP"
            if not part.value:
                # Only warn for parts we would actually have placed.
                if not do_not_place:
                    log.warning(
                        "%s skipped due to missing EPIC part number"
                        '(field "EP")',
                        refdes,
                    )
                continue
            if value.value is None:
                value.value = comp.findtext("value", default="n/a")
            if do_not_place:
                log.info(
                    '%s marked as do-not-place ("%s=DNP")', refdes, inst.name
                )
            try:
                part_id = int(part.value)
            except ValueError:
                log.warning(
                    '%s has invalid EPIC part number "%s"', refdes, part.value
                )
                continue
            c = Component(part_id, refdes, value.value, footprint)
            if do_not_place:
                _bom_append(self.dnps, c)
            else:
                _bom_append(self.comps, c)
        # Resolve every component against EPIC (placed and DNP alike).
        for comp_list in self.comps.values():
            for c in comp_list:
                c.load_from_epic()
        for comp_list in self.dnps.values():
            for c in comp_list:
                c.load_from_epic()
        if flavors:
            unknown_flavors = [
                flavor for flavor in flavors if flavor not in detected_flavors
            ]
            if unknown_flavors:
                raise UnknownFlavorsError(unknown_flavors, detected_flavors)
        # Report the flavors defined by the schematic.  Filter out None
        # (unflavored fields) defensively: sorting/joining a set that
        # mixes None and str raises TypeError, and the original code's
        # "no flavors" branch was unreachable dead code.
        named_flavors = sorted(f for f in detected_flavors if f is not None)
        if named_flavors:
            if len(named_flavors) > 1:
                detected_flavors_str = "flavors: "
            else:
                detected_flavors_str = "flavor: "
            detected_flavors_str += ", ".join(named_flavors)
        else:
            detected_flavors_str = "no flavors"
        log.info("this schematic defines %s", detected_flavors_str)

    def save_as_epic_assembly(self, force_update=False):
        """Save the BOM as an EPIC assembly.  If an EPIC assembly with the
        same part-number already exists, it is updated unless the
        assembly indicates that it was last updated by a different
        tool or an interactive EPIC user.  This is detected based on
        the last update type and the toolname set when the BOM object
        was created (see argument TOOLNAME).  If the assembly was last
        edited by a different tool or interactive user, a kicad.Error
        exception is raised, unless FORCE_UPDATE is True.

        If an EPIC assembly item is created, its manufacturer is set
        to the MANUFACTURER specified when creating the BOM object and
        its part number will have the form bom:SCHEMATIC_NAME[-FLAVOR],
        where SCHEMATIC_NAME is the name of the schematic and FLAVOR is
        the name of the selected flavor (if any).

        Returns a pair containing the EPIC assembly part that was
        created/updated for the BOM and a boolean which is True if the
        assembly part was created (False if it was updated).
        """
        assembly_name = "bom:" + self.schematic_name + self.revision
        if self.flavors:
            assembly_name += "-" + "-".join(self.flavors)
        # see if the assembly exists already:
        old_assy = None
        try:
            reply = self.epic_api.get(
                "part/?mfg=%s&mfg_pn=%s" % (self.manufacturer, assembly_name)
            )
            if reply:
                old_assy = SimpleNamespace(**reply[0])
        except webapi.Error:
            # best-effort lookup: treat API errors as "does not exist"
            pass
        if old_assy:
            # Refuse to clobber edits made by another tool or a human,
            # unless the caller explicitly forces the update.
            if (
                old_assy.last_bom_mod_type != Enum.LAST_MOD_TYPE_TOOL
                or old_assy.last_bom_mod_name != self.toolname
            ):
                last_editor = "%s %s" % (
                    strchoice(
                        Enum.LAST_MOD_CHOICES, old_assy.last_bom_mod_type
                    ),
                    old_assy.last_bom_mod_name,
                )
                if force_update:
                    log.info(
                        "overwriting part %s %s last modified by %s",
                        self.manufacturer,
                        assembly_name,
                        last_editor,
                    )
                else:
                    raise Error(
                        "Refusing to overwrite part last modified "
                        "by %s %s."
                        % (last_editor, format_part_number(old_assy.id))
                    )
        desc = "BOM %s" % self.schematic_name
        if self.flavors:
            desc += "-" + "-".join(self.flavors)
        assembly_part = SimpleNamespace(
            descr=desc,
            mfg=self.manufacturer,
            mfg_pn=assembly_name,
            mounting=Enum.MOUNTING_CHASSIS,
            target_price=1000,
            overage=1,
            spq=1,
            lead_time=4,
            status=Enum.STATUS_PREVIEW,
            last_bom_mod_type=Enum.LAST_MOD_TYPE_TOOL,
            last_bom_mod_name=self.toolname,
        )
        try:
            if old_assy:
                reply = self.epic_api.put(
                    "part/%d/" % old_assy.id, assembly_part
                )
            else:
                reply = self.epic_api.post("part/", assembly_part)
            assembly_part = SimpleNamespace(**reply)
        except webapi.Error as e:
            raise Error(
                "Failed to create assembly part.", assembly_part
            ) from e
        # create assembly-items for the components in the BOM:
        assy_items = []
        for components in self.comps.values():
            comp = components[0]
            refdes = ",".join(sorted([c.refdes for c in components]))
            assy_item = SimpleNamespace(
                assy=assembly_part.id,
                comp=comp.part_id,
                qty=len(components),
                refdes=refdes,
            )
            assy_items.append(assy_item.__dict__)
        try:
            self.epic_api.post("assembly_item/", assy_items)
        except webapi.Error as e:
            raise Error("Failed to save assembly items.", assy_items) from e
        return (assembly_part, not old_assy)

    def save_as_csv(self, filename, **kwargs):
        """Save the BOM as a CSV text file.  See epic.kicad.csv.write() for a
        description of available parameters.
        """
        csv.write(filename, self.epic_api, self, **kwargs)
|
PypiClean
|
/tagtools-0.8d.tar.gz/tagtools-0.8d/docs/flickr.rst
|
FlickrTokenizer
================
.. currentmodule:: tagtools
.. moduleauthor:: Gustavo Picon <[email protected]>
.. inheritance-diagram:: FlickrTokenizer
.. autoclass:: FlickrTokenizer
:show-inheritance:
Example::
FlickrTokenizer.str2tags('"Tag 1" Tag2 "TAG 1" Tag3')
returns::
[('tag 1', 'Tag 1'), ('tag2', 'Tag2'), ('tag3', 'Tag3')]
and::
FlickrTokenizer.tags2str(['tag 1', 'tag2', 'tag3'])
returns::
'"tag 1" tag2 tag3'
.. note::
   Flickr tags are very... peculiar. The test suite has a lot of weird
   cases, and they all work exactly like Flickr. Please let me know if
   there is a corner case I'm not covering.
|
PypiClean
|
/boto3_type_annotations_with_docs-0.3.1.tar.gz/boto3_type_annotations_with_docs-0.3.1/boto3_type_annotations/xray/paginator.py
|
from datetime import datetime
from typing import Dict
from typing import List
from typing import Optional

from botocore.paginate import Paginator
class BatchGetTraces(Paginator):
    # Typed stub: pagination is implemented by botocore at runtime; this
    # class only documents the signature and request/response shapes.
    def paginate(self, TraceIds: List, PaginationConfig: Optional[Dict] = None) -> Dict:
        """
        Creates an iterator that will paginate through responses from :py:meth:`XRay.Client.batch_get_traces`.
        See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/BatchGetTraces>`_
        **Request Syntax**
        ::
          response_iterator = paginator.paginate(
              TraceIds=[
                  'string',
              ],
              PaginationConfig={
                  'MaxItems': 123,
                  'PageSize': 123,
                  'StartingToken': 'string'
              }
          )
        **Response Syntax**
        ::
            {
                'Traces': [
                    {
                        'Id': 'string',
                        'Duration': 123.0,
                        'Segments': [
                            {
                                'Id': 'string',
                                'Document': 'string'
                            },
                        ]
                    },
                ],
                'UnprocessedTraceIds': [
                    'string',
                ],
            }
        **Response Structure**
        - *(dict) --*
          - **Traces** *(list) --*
            Full traces for the specified requests.
            - *(dict) --*
              A collection of segment documents with matching trace IDs.
              - **Id** *(string) --*
                The unique identifier for the request that generated the trace's segments and subsegments.
              - **Duration** *(float) --*
                The length of time in seconds between the start time of the root segment and the end time of the last segment that completed.
              - **Segments** *(list) --*
                Segment documents for the segments and subsegments that comprise the trace.
                - *(dict) --*
                  A segment from a trace that has been ingested by the X-Ray service. The segment can be compiled from documents uploaded with PutTraceSegments , or an ``inferred`` segment for a downstream service, generated from a subsegment sent by the service that called it.
                  For the full segment document schema, see `AWS X-Ray Segment Documents <https://docs.aws.amazon.com/xray/latest/devguide/xray-api-segmentdocuments.html>`__ in the *AWS X-Ray Developer Guide* .
                  - **Id** *(string) --*
                    The segment's ID.
                  - **Document** *(string) --*
                    The segment document.
          - **UnprocessedTraceIds** *(list) --*
            Trace IDs of requests that haven't been processed.
            - *(string) --*
        :type TraceIds: list
        :param TraceIds: **[REQUIRED]**
          Specify the trace IDs of requests for which to retrieve segments.
          - *(string) --*
        :type PaginationConfig: dict
        :param PaginationConfig:
          A dictionary that provides parameters to control pagination.
          - **MaxItems** *(integer) --*
            The total number of items to return. If the total number of items available is more than the value specified in max-items then a ``NextToken`` will be provided in the output that you can use to resume pagination.
          - **PageSize** *(integer) --*
            The size of each page.
          - **StartingToken** *(string) --*
            A token to specify where to start paginating. This is the ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class GetGroups(Paginator):
    # Typed stub: pagination is implemented by botocore at runtime; this
    # class only documents the signature and request/response shapes.
    def paginate(self, PaginationConfig: Optional[Dict] = None) -> Dict:
        """
        Creates an iterator that will paginate through responses from :py:meth:`XRay.Client.get_groups`.
        See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetGroups>`_
        **Request Syntax**
        ::
          response_iterator = paginator.paginate(
              PaginationConfig={
                  'MaxItems': 123,
                  'PageSize': 123,
                  'StartingToken': 'string'
              }
          )
        **Response Syntax**
        ::
            {
                'Groups': [
                    {
                        'GroupName': 'string',
                        'GroupARN': 'string',
                        'FilterExpression': 'string'
                    },
                ],
            }
        **Response Structure**
        - *(dict) --*
          - **Groups** *(list) --*
            The collection of all active groups.
            - *(dict) --*
              Details for a group without metadata.
              - **GroupName** *(string) --*
                The unique case-sensitive name of the group.
              - **GroupARN** *(string) --*
                The ARN of the group generated based on the GroupName.
              - **FilterExpression** *(string) --*
                The filter expression defining the parameters to include traces.
        :type PaginationConfig: dict
        :param PaginationConfig:
          A dictionary that provides parameters to control pagination.
          - **MaxItems** *(integer) --*
            The total number of items to return. If the total number of items available is more than the value specified in max-items then a ``NextToken`` will be provided in the output that you can use to resume pagination.
          - **PageSize** *(integer) --*
            The size of each page.
          - **StartingToken** *(string) --*
            A token to specify where to start paginating. This is the ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class GetSamplingRules(Paginator):
    # Typed stub: pagination is implemented by botocore at runtime; this
    # class only documents the signature and request/response shapes.
    def paginate(self, PaginationConfig: Optional[Dict] = None) -> Dict:
        """
        Creates an iterator that will paginate through responses from :py:meth:`XRay.Client.get_sampling_rules`.
        See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetSamplingRules>`_
        **Request Syntax**
        ::
          response_iterator = paginator.paginate(
              PaginationConfig={
                  'MaxItems': 123,
                  'PageSize': 123,
                  'StartingToken': 'string'
              }
          )
        **Response Syntax**
        ::
            {
                'SamplingRuleRecords': [
                    {
                        'SamplingRule': {
                            'RuleName': 'string',
                            'RuleARN': 'string',
                            'ResourceARN': 'string',
                            'Priority': 123,
                            'FixedRate': 123.0,
                            'ReservoirSize': 123,
                            'ServiceName': 'string',
                            'ServiceType': 'string',
                            'Host': 'string',
                            'HTTPMethod': 'string',
                            'URLPath': 'string',
                            'Version': 123,
                            'Attributes': {
                                'string': 'string'
                            }
                        },
                        'CreatedAt': datetime(2015, 1, 1),
                        'ModifiedAt': datetime(2015, 1, 1)
                    },
                ],
            }
        **Response Structure**
        - *(dict) --*
          - **SamplingRuleRecords** *(list) --*
            Rule definitions and metadata.
            - *(dict) --*
              A SamplingRule and its metadata.
              - **SamplingRule** *(dict) --*
                The sampling rule.
                - **RuleName** *(string) --*
                  The name of the sampling rule. Specify a rule by either name or ARN, but not both.
                - **RuleARN** *(string) --*
                  The ARN of the sampling rule. Specify a rule by either name or ARN, but not both.
                - **ResourceARN** *(string) --*
                  Matches the ARN of the AWS resource on which the service runs.
                - **Priority** *(integer) --*
                  The priority of the sampling rule.
                - **FixedRate** *(float) --*
                  The percentage of matching requests to instrument, after the reservoir is exhausted.
                - **ReservoirSize** *(integer) --*
                  A fixed number of matching requests to instrument per second, prior to applying the fixed rate. The reservoir is not used directly by services, but applies to all services using the rule collectively.
                - **ServiceName** *(string) --*
                  Matches the ``name`` that the service uses to identify itself in segments.
                - **ServiceType** *(string) --*
                  Matches the ``origin`` that the service uses to identify its type in segments.
                - **Host** *(string) --*
                  Matches the hostname from a request URL.
                - **HTTPMethod** *(string) --*
                  Matches the HTTP method of a request.
                - **URLPath** *(string) --*
                  Matches the path from a request URL.
                - **Version** *(integer) --*
                  The version of the sampling rule format (``1`` ).
                - **Attributes** *(dict) --*
                  Matches attributes derived from the request.
                  - *(string) --*
                    - *(string) --*
              - **CreatedAt** *(datetime) --*
                When the rule was created.
              - **ModifiedAt** *(datetime) --*
                When the rule was last modified.
        :type PaginationConfig: dict
        :param PaginationConfig:
          A dictionary that provides parameters to control pagination.
          - **MaxItems** *(integer) --*
            The total number of items to return. If the total number of items available is more than the value specified in max-items then a ``NextToken`` will be provided in the output that you can use to resume pagination.
          - **PageSize** *(integer) --*
            The size of each page.
          - **StartingToken** *(string) --*
            A token to specify where to start paginating. This is the ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class GetSamplingStatisticSummaries(Paginator):
    # Typed stub: pagination is implemented by botocore at runtime; this
    # class only documents the signature and request/response shapes.
    def paginate(self, PaginationConfig: Optional[Dict] = None) -> Dict:
        """
        Creates an iterator that will paginate through responses from :py:meth:`XRay.Client.get_sampling_statistic_summaries`.
        See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetSamplingStatisticSummaries>`_
        **Request Syntax**
        ::
          response_iterator = paginator.paginate(
              PaginationConfig={
                  'MaxItems': 123,
                  'PageSize': 123,
                  'StartingToken': 'string'
              }
          )
        **Response Syntax**
        ::
            {
                'SamplingStatisticSummaries': [
                    {
                        'RuleName': 'string',
                        'Timestamp': datetime(2015, 1, 1),
                        'RequestCount': 123,
                        'BorrowCount': 123,
                        'SampledCount': 123
                    },
                ],
            }
        **Response Structure**
        - *(dict) --*
          - **SamplingStatisticSummaries** *(list) --*
            Information about the number of requests instrumented for each sampling rule.
            - *(dict) --*
              Aggregated request sampling data for a sampling rule across all services for a 10 second window.
              - **RuleName** *(string) --*
                The name of the sampling rule.
              - **Timestamp** *(datetime) --*
                The start time of the reporting window.
              - **RequestCount** *(integer) --*
                The number of requests that matched the rule.
              - **BorrowCount** *(integer) --*
                The number of requests recorded with borrowed reservoir quota.
              - **SampledCount** *(integer) --*
                The number of requests recorded.
        :type PaginationConfig: dict
        :param PaginationConfig:
          A dictionary that provides parameters to control pagination.
          - **MaxItems** *(integer) --*
            The total number of items to return. If the total number of items available is more than the value specified in max-items then a ``NextToken`` will be provided in the output that you can use to resume pagination.
          - **PageSize** *(integer) --*
            The size of each page.
          - **StartingToken** *(string) --*
            A token to specify where to start paginating. This is the ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class GetServiceGraph(Paginator):
    # Typed stub: pagination is implemented by botocore at runtime; this
    # class only documents the signature and request/response shapes.
    def paginate(self, StartTime: datetime, EndTime: datetime, GroupName: Optional[str] = None, GroupARN: Optional[str] = None, PaginationConfig: Optional[Dict] = None) -> Dict:
        """
        Creates an iterator that will paginate through responses from :py:meth:`XRay.Client.get_service_graph`.
        See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetServiceGraph>`_
        **Request Syntax**
        ::
          response_iterator = paginator.paginate(
              StartTime=datetime(2015, 1, 1),
              EndTime=datetime(2015, 1, 1),
              GroupName='string',
              GroupARN='string',
              PaginationConfig={
                  'MaxItems': 123,
                  'PageSize': 123,
                  'StartingToken': 'string'
              }
          )
        **Response Syntax**
        ::
            {
                'StartTime': datetime(2015, 1, 1),
                'EndTime': datetime(2015, 1, 1),
                'Services': [
                    {
                        'ReferenceId': 123,
                        'Name': 'string',
                        'Names': [
                            'string',
                        ],
                        'Root': True|False,
                        'AccountId': 'string',
                        'Type': 'string',
                        'State': 'string',
                        'StartTime': datetime(2015, 1, 1),
                        'EndTime': datetime(2015, 1, 1),
                        'Edges': [
                            {
                                'ReferenceId': 123,
                                'StartTime': datetime(2015, 1, 1),
                                'EndTime': datetime(2015, 1, 1),
                                'SummaryStatistics': {
                                    'OkCount': 123,
                                    'ErrorStatistics': {
                                        'ThrottleCount': 123,
                                        'OtherCount': 123,
                                        'TotalCount': 123
                                    },
                                    'FaultStatistics': {
                                        'OtherCount': 123,
                                        'TotalCount': 123
                                    },
                                    'TotalCount': 123,
                                    'TotalResponseTime': 123.0
                                },
                                'ResponseTimeHistogram': [
                                    {
                                        'Value': 123.0,
                                        'Count': 123
                                    },
                                ],
                                'Aliases': [
                                    {
                                        'Name': 'string',
                                        'Names': [
                                            'string',
                                        ],
                                        'Type': 'string'
                                    },
                                ]
                            },
                        ],
                        'SummaryStatistics': {
                            'OkCount': 123,
                            'ErrorStatistics': {
                                'ThrottleCount': 123,
                                'OtherCount': 123,
                                'TotalCount': 123
                            },
                            'FaultStatistics': {
                                'OtherCount': 123,
                                'TotalCount': 123
                            },
                            'TotalCount': 123,
                            'TotalResponseTime': 123.0
                        },
                        'DurationHistogram': [
                            {
                                'Value': 123.0,
                                'Count': 123
                            },
                        ],
                        'ResponseTimeHistogram': [
                            {
                                'Value': 123.0,
                                'Count': 123
                            },
                        ]
                    },
                ],
                'ContainsOldGroupVersions': True|False,
            }
        **Response Structure**
        - *(dict) --*
          - **StartTime** *(datetime) --*
            The start of the time frame for which the graph was generated.
          - **EndTime** *(datetime) --*
            The end of the time frame for which the graph was generated.
          - **Services** *(list) --*
            The services that have processed a traced request during the specified time frame.
            - *(dict) --*
              Information about an application that processed requests, users that made requests, or downstream services, resources and applications that an application used.
              - **ReferenceId** *(integer) --*
                Identifier for the service. Unique within the service map.
              - **Name** *(string) --*
                The canonical name of the service.
              - **Names** *(list) --*
                A list of names for the service, including the canonical name.
                - *(string) --*
              - **Root** *(boolean) --*
                Indicates that the service was the first service to process a request.
              - **AccountId** *(string) --*
                Identifier of the AWS account in which the service runs.
              - **Type** *(string) --*
                The type of service.
                * AWS Resource - The type of an AWS resource. For example, ``AWS::EC2::Instance`` for a application running on Amazon EC2 or ``AWS::DynamoDB::Table`` for an Amazon DynamoDB table that the application used.
                * AWS Service - The type of an AWS service. For example, ``AWS::DynamoDB`` for downstream calls to Amazon DynamoDB that didn't target a specific table.
                * ``client`` - Represents the clients that sent requests to a root service.
                * ``remote`` - A downstream service of indeterminate type.
              - **State** *(string) --*
                The service's state.
              - **StartTime** *(datetime) --*
                The start time of the first segment that the service generated.
              - **EndTime** *(datetime) --*
                The end time of the last segment that the service generated.
              - **Edges** *(list) --*
                Connections to downstream services.
                - *(dict) --*
                  Information about a connection between two services.
                  - **ReferenceId** *(integer) --*
                    Identifier of the edge. Unique within a service map.
                  - **StartTime** *(datetime) --*
                    The start time of the first segment on the edge.
                  - **EndTime** *(datetime) --*
                    The end time of the last segment on the edge.
                  - **SummaryStatistics** *(dict) --*
                    Response statistics for segments on the edge.
                    - **OkCount** *(integer) --*
                      The number of requests that completed with a 2xx Success status code.
                    - **ErrorStatistics** *(dict) --*
                      Information about requests that failed with a 4xx Client Error status code.
                      - **ThrottleCount** *(integer) --*
                        The number of requests that failed with a 419 throttling status code.
                      - **OtherCount** *(integer) --*
                        The number of requests that failed with untracked 4xx Client Error status codes.
                      - **TotalCount** *(integer) --*
                        The total number of requests that failed with a 4xx Client Error status code.
                    - **FaultStatistics** *(dict) --*
                      Information about requests that failed with a 5xx Server Error status code.
                      - **OtherCount** *(integer) --*
                        The number of requests that failed with untracked 5xx Server Error status codes.
                      - **TotalCount** *(integer) --*
                        The total number of requests that failed with a 5xx Server Error status code.
                    - **TotalCount** *(integer) --*
                      The total number of completed requests.
                    - **TotalResponseTime** *(float) --*
                      The aggregate response time of completed requests.
                  - **ResponseTimeHistogram** *(list) --*
                    A histogram that maps the spread of client response times on an edge.
                    - *(dict) --*
                      An entry in a histogram for a statistic. A histogram maps the range of observed values on the X axis, and the prevalence of each value on the Y axis.
                      - **Value** *(float) --*
                        The value of the entry.
                      - **Count** *(integer) --*
                        The prevalence of the entry.
                  - **Aliases** *(list) --*
                    Aliases for the edge.
                    - *(dict) --*
                      An alias for an edge.
                      - **Name** *(string) --*
                        The canonical name of the alias.
                      - **Names** *(list) --*
                        A list of names for the alias, including the canonical name.
                        - *(string) --*
                      - **Type** *(string) --*
                        The type of the alias.
              - **SummaryStatistics** *(dict) --*
                Aggregated statistics for the service.
                - **OkCount** *(integer) --*
                  The number of requests that completed with a 2xx Success status code.
                - **ErrorStatistics** *(dict) --*
                  Information about requests that failed with a 4xx Client Error status code.
                  - **ThrottleCount** *(integer) --*
                    The number of requests that failed with a 419 throttling status code.
                  - **OtherCount** *(integer) --*
                    The number of requests that failed with untracked 4xx Client Error status codes.
                  - **TotalCount** *(integer) --*
                    The total number of requests that failed with a 4xx Client Error status code.
                - **FaultStatistics** *(dict) --*
                  Information about requests that failed with a 5xx Server Error status code.
                  - **OtherCount** *(integer) --*
                    The number of requests that failed with untracked 5xx Server Error status codes.
                  - **TotalCount** *(integer) --*
                    The total number of requests that failed with a 5xx Server Error status code.
                - **TotalCount** *(integer) --*
                  The total number of completed requests.
                - **TotalResponseTime** *(float) --*
                  The aggregate response time of completed requests.
              - **DurationHistogram** *(list) --*
                A histogram that maps the spread of service durations.
                - *(dict) --*
                  An entry in a histogram for a statistic. A histogram maps the range of observed values on the X axis, and the prevalence of each value on the Y axis.
                  - **Value** *(float) --*
                    The value of the entry.
                  - **Count** *(integer) --*
                    The prevalence of the entry.
              - **ResponseTimeHistogram** *(list) --*
                A histogram that maps the spread of service response times.
                - *(dict) --*
                  An entry in a histogram for a statistic. A histogram maps the range of observed values on the X axis, and the prevalence of each value on the Y axis.
                  - **Value** *(float) --*
                    The value of the entry.
                  - **Count** *(integer) --*
                    The prevalence of the entry.
          - **ContainsOldGroupVersions** *(boolean) --*
            A flag indicating whether the group's filter expression has been consistent, or if the returned service graph may show traces from an older version of the group's filter expression.
        :type StartTime: datetime
        :param StartTime: **[REQUIRED]**
          The start of the time frame for which to generate a graph.
        :type EndTime: datetime
        :param EndTime: **[REQUIRED]**
          The end of the timeframe for which to generate a graph.
        :type GroupName: string
        :param GroupName:
          The name of a group to generate a graph based on.
        :type GroupARN: string
        :param GroupARN:
          The ARN of a group to generate a graph based on.
        :type PaginationConfig: dict
        :param PaginationConfig:
          A dictionary that provides parameters to control pagination.
          - **MaxItems** *(integer) --*
            The total number of items to return. If the total number of items available is more than the value specified in max-items then a ``NextToken`` will be provided in the output that you can use to resume pagination.
          - **PageSize** *(integer) --*
            The size of each page.
          - **StartingToken** *(string) --*
            A token to specify where to start paginating. This is the ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class GetTimeSeriesServiceStatistics(Paginator):
    # Typed stub: pagination is implemented by botocore at runtime; this
    # class only documents the signature and request/response shapes.
    def paginate(self, StartTime: datetime, EndTime: datetime, GroupName: Optional[str] = None, GroupARN: Optional[str] = None, EntitySelectorExpression: Optional[str] = None, Period: Optional[int] = None, PaginationConfig: Optional[Dict] = None) -> Dict:
        """
        Creates an iterator that will paginate through responses from :py:meth:`XRay.Client.get_time_series_service_statistics`.
        See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetTimeSeriesServiceStatistics>`_
        **Request Syntax**
        ::
          response_iterator = paginator.paginate(
              StartTime=datetime(2015, 1, 1),
              EndTime=datetime(2015, 1, 1),
              GroupName='string',
              GroupARN='string',
              EntitySelectorExpression='string',
              Period=123,
              PaginationConfig={
                  'MaxItems': 123,
                  'PageSize': 123,
                  'StartingToken': 'string'
              }
          )
        **Response Syntax**
        ::
            {
                'TimeSeriesServiceStatistics': [
                    {
                        'Timestamp': datetime(2015, 1, 1),
                        'EdgeSummaryStatistics': {
                            'OkCount': 123,
                            'ErrorStatistics': {
                                'ThrottleCount': 123,
                                'OtherCount': 123,
                                'TotalCount': 123
                            },
                            'FaultStatistics': {
                                'OtherCount': 123,
                                'TotalCount': 123
                            },
                            'TotalCount': 123,
                            'TotalResponseTime': 123.0
                        },
                        'ServiceSummaryStatistics': {
                            'OkCount': 123,
                            'ErrorStatistics': {
                                'ThrottleCount': 123,
                                'OtherCount': 123,
                                'TotalCount': 123
                            },
                            'FaultStatistics': {
                                'OtherCount': 123,
                                'TotalCount': 123
                            },
                            'TotalCount': 123,
                            'TotalResponseTime': 123.0
                        },
                        'ResponseTimeHistogram': [
                            {
                                'Value': 123.0,
                                'Count': 123
                            },
                        ]
                    },
                ],
                'ContainsOldGroupVersions': True|False,
            }
        **Response Structure**
        - *(dict) --*
          - **TimeSeriesServiceStatistics** *(list) --*
            The collection of statistics.
            - *(dict) --*
              A list of TimeSeriesStatistic structures.
              - **Timestamp** *(datetime) --*
                Timestamp of the window for which statistics are aggregated.
              - **EdgeSummaryStatistics** *(dict) --*
                Response statistics for an edge.
                - **OkCount** *(integer) --*
                  The number of requests that completed with a 2xx Success status code.
                - **ErrorStatistics** *(dict) --*
                  Information about requests that failed with a 4xx Client Error status code.
                  - **ThrottleCount** *(integer) --*
                    The number of requests that failed with a 419 throttling status code.
                  - **OtherCount** *(integer) --*
                    The number of requests that failed with untracked 4xx Client Error status codes.
                  - **TotalCount** *(integer) --*
                    The total number of requests that failed with a 4xx Client Error status code.
                - **FaultStatistics** *(dict) --*
                  Information about requests that failed with a 5xx Server Error status code.
                  - **OtherCount** *(integer) --*
                    The number of requests that failed with untracked 5xx Server Error status codes.
                  - **TotalCount** *(integer) --*
                    The total number of requests that failed with a 5xx Server Error status code.
                - **TotalCount** *(integer) --*
                  The total number of completed requests.
                - **TotalResponseTime** *(float) --*
                  The aggregate response time of completed requests.
              - **ServiceSummaryStatistics** *(dict) --*
                Response statistics for a service.
                - **OkCount** *(integer) --*
                  The number of requests that completed with a 2xx Success status code.
                - **ErrorStatistics** *(dict) --*
                  Information about requests that failed with a 4xx Client Error status code.
                  - **ThrottleCount** *(integer) --*
                    The number of requests that failed with a 419 throttling status code.
                  - **OtherCount** *(integer) --*
                    The number of requests that failed with untracked 4xx Client Error status codes.
                  - **TotalCount** *(integer) --*
                    The total number of requests that failed with a 4xx Client Error status code.
                - **FaultStatistics** *(dict) --*
                  Information about requests that failed with a 5xx Server Error status code.
                  - **OtherCount** *(integer) --*
                    The number of requests that failed with untracked 5xx Server Error status codes.
                  - **TotalCount** *(integer) --*
                    The total number of requests that failed with a 5xx Server Error status code.
                - **TotalCount** *(integer) --*
                  The total number of completed requests.
                - **TotalResponseTime** *(float) --*
                  The aggregate response time of completed requests.
              - **ResponseTimeHistogram** *(list) --*
                The response time histogram for the selected entities.
                - *(dict) --*
                  An entry in a histogram for a statistic. A histogram maps the range of observed values on the X axis, and the prevalence of each value on the Y axis.
                  - **Value** *(float) --*
                    The value of the entry.
                  - **Count** *(integer) --*
                    The prevalence of the entry.
          - **ContainsOldGroupVersions** *(boolean) --*
            A flag indicating whether or not a group's filter expression has been consistent, or if a returned aggregation may show statistics from an older version of the group's filter expression.
        :type StartTime: datetime
        :param StartTime: **[REQUIRED]**
          The start of the time frame for which to aggregate statistics.
        :type EndTime: datetime
        :param EndTime: **[REQUIRED]**
          The end of the time frame for which to aggregate statistics.
        :type GroupName: string
        :param GroupName:
          The case-sensitive name of the group for which to pull statistics from.
        :type GroupARN: string
        :param GroupARN:
          The ARN of the group for which to pull statistics from.
        :type EntitySelectorExpression: string
        :param EntitySelectorExpression:
          A filter expression defining entities that will be aggregated for statistics. Supports ID, service, and edge functions. If no selector expression is specified, edge statistics are returned.
        :type Period: integer
        :param Period:
          Aggregation period in seconds.
        :type PaginationConfig: dict
        :param PaginationConfig:
          A dictionary that provides parameters to control pagination.
          - **MaxItems** *(integer) --*
            The total number of items to return. If the total number of items available is more than the value specified in max-items then a ``NextToken`` will be provided in the output that you can use to resume pagination.
          - **PageSize** *(integer) --*
            The size of each page.
          - **StartingToken** *(string) --*
            A token to specify where to start paginating. This is the ``NextToken`` from a previous response.
        :rtype: dict
        :returns:
        """
        pass
class GetTraceGraph(Paginator):
    def paginate(self, TraceIds: List, PaginationConfig: Dict = None) -> Dict:
        """
        Creates an iterator that will paginate through responses from
        :py:meth:`XRay.Client.get_trace_graph`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetTraceGraph>`_

        :type TraceIds: list
        :param TraceIds: **[REQUIRED]**
            Trace IDs of requests for which to generate a service graph.
            - *(string) --*
        :type PaginationConfig: dict
        :param PaginationConfig:
            A dictionary that provides parameters to control pagination:
            ``MaxItems`` (total number of items to return; a ``NextToken`` is
            provided when more are available), ``PageSize`` (size of each page)
            and ``StartingToken`` (the ``NextToken`` from a previous response).
        :rtype: dict
        :returns:
            A dict with a ``Services`` list describing every service that
            processed one of the specified requests.  Each service entry
            carries its identity (``ReferenceId``, ``Name``, ``Names``,
            ``Root``, ``AccountId``, ``Type``, ``State``), the time span of
            its segments (``StartTime``/``EndTime``), its downstream
            ``Edges`` (each with ``SummaryStatistics`` — Ok/Error/Fault
            counts and ``TotalResponseTime`` — a ``ResponseTimeHistogram``
            of ``Value``/``Count`` entries, and ``Aliases``), plus aggregated
            ``SummaryStatistics``, a ``DurationHistogram`` and a
            ``ResponseTimeHistogram`` for the service itself.
        """
        # Stub only: the concrete implementation is supplied by botocore's
        # Paginator machinery at runtime.
        pass
class GetTraceSummaries(Paginator):
    def paginate(self, StartTime: datetime, EndTime: datetime, TimeRangeType: str = None, Sampling: bool = None, SamplingStrategy: Dict = None, FilterExpression: str = None, PaginationConfig: Dict = None) -> Dict:
        """
        Creates an iterator that will paginate through responses from
        :py:meth:`XRay.Client.get_trace_summaries`.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetTraceSummaries>`_

        :type StartTime: datetime
        :param StartTime: **[REQUIRED]**
            The start of the time frame for which to retrieve traces.
        :type EndTime: datetime
        :param EndTime: **[REQUIRED]**
            The end of the time frame for which to retrieve traces.
        :type TimeRangeType: string
        :param TimeRangeType:
            Whether to query trace summaries by ``'TraceId'`` or ``'Event'``
            time.
        :type Sampling: boolean
        :param Sampling:
            Set to ``true`` to get summaries for only a subset of available
            traces.
        :type SamplingStrategy: dict
        :param SamplingStrategy:
            Sampling strategy with ``Name`` (``'PartialScan'`` or
            ``'FixedRate'``) and a float ``Value``.
        :type FilterExpression: string
        :param FilterExpression:
            Filter expression to retrieve trace summaries for services or
            requests that meet certain requirements.
        :type PaginationConfig: dict
        :param PaginationConfig:
            A dictionary that provides parameters to control pagination:
            ``MaxItems`` (total number of items to return; a ``NextToken`` is
            provided when more are available), ``PageSize`` (size of each page)
            and ``StartingToken`` (the ``NextToken`` from a previous response).
        :rtype: dict
        :returns:
            A dict with a ``TraceSummaries`` list, ``ApproximateTime`` (start
            time of this page of results) and ``TracesProcessedCount`` (total
            traces processed, including non-matching ones).  Each summary
            holds the trace ``Id``, ``Duration``, ``ResponseTime``, the flags
            ``HasFault``/``HasError``/``HasThrottle``/``IsPartial``, the
            ``Http`` request info (URL, status, method, user agent, client
            IP), segment ``Annotations`` and ``Users``, the ``ServiceIds``,
            ``ResourceARNs``, ``InstanceIds`` and ``AvailabilityZones``
            touched by the trace, the ``EntryPoint`` service, the
            ``FaultRootCauses``/``ErrorRootCauses``/``ResponseTimeRootCauses``
            collections (each a list of services with ``EntityPath`` entries
            describing segments, exceptions/coverage, ``Remote`` and
            ``Inferred`` flags), the trace ``Revision`` and the
            ``MatchedEventTime``.
        """
        # Stub only: the concrete implementation is supplied by botocore's
        # Paginator machinery at runtime.
        pass
|
PypiClean
|
/redlock-py-catch-1.1.0.tar.gz/redlock-py-catch-1.1.0/redlock/cli.py
|
from __future__ import print_function
import argparse
import sys
import textwrap
import redlock
def log(*args, **kwargs):
    """Print diagnostic output to stderr unless quiet mode is enabled.

    ``log.quiet`` is a function attribute assigned by :func:`main` from the
    ``--quiet`` flag.  ``getattr`` with a ``False`` default keeps this helper
    safe to call before ``main`` has configured it (the original raised
    ``AttributeError`` in that case).
    """
    if not getattr(log, "quiet", False):
        print(*args, file=sys.stderr, **kwargs)
def lock(name, validity, redis, retry_count=3, retry_delay=200, **kwargs):
    """Acquire a distributed lock on ``name`` valid for ``validity`` ms.

    Returns 0 on success (the lock key is printed to stdout), 1 when the
    lock could not be acquired, and 3 on a Redis communication error.
    A negative ``retry_count`` requests infinitely blocking behaviour: the
    loop below then never returns on failure and simply tries again.
    """
    blocking = retry_count < 0
    if blocking:
        retry_count = 0
    while True:
        try:
            # Redlock's own retry_count is "total attempts", hence the +1;
            # its retry_delay is expressed in seconds.
            manager = redlock.Redlock(
                redis,
                retry_count=retry_count + 1,
                retry_delay=retry_delay / 1000.0,
            )
            acquired = manager.lock(name, validity)
            if acquired is False:
                log("failed")
                status = 1
            else:
                log("ok")
                print(acquired.key)
                return 0
        except Exception as exc:
            log("error %s" % exc)
            status = 3
        if not blocking:
            return status
        # redlock already slept for retry-delay, so loop straight away
def unlock(name, key, redis, **kwargs):
    """Release the lock ``name`` using the ``key`` printed by a prior lock.

    Returns 0 on success and 3 on a Redis communication error.
    """
    try:
        manager = redlock.Redlock(redis)
        # validity (first Lock field) is irrelevant for unlocking; 0 is fine.
        manager.unlock(redlock.Lock(0, name, key))
    except Exception as exc:
        log("Error: %s" % exc)
        return 3
    log("ok")
    return 0
def main():
    """Command-line entry point for the redlock CLI.

    Parses arguments, dispatches to :func:`lock` or :func:`unlock`, and exits
    with the return codes documented in the epilog below.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=textwrap.dedent("""
            Return codes:
            0 if the action succeeded
            1 if the action failed (eg. timed out)
            2 if there was an error with the options
            3 if there was an error communicating with Redis (eg. socket timeout)
        """))
    # Typo fix: the URL example previously lacked its closing parenthesis.
    parser.add_argument("--redis", action="append", default=None,
                        help="Redis URL (eg. redis://localhost:6379/0)", metavar="URL")
    parser.add_argument("--quiet", action="store_true", default=False,
                        help="No stderr output, just a return code (and key for lock action)")
    subparsers = parser.add_subparsers(help='See command help via `%(prog)s <command> --help`')
    parser_lock = subparsers.add_parser('lock', help='Acquire a lock', description="""
    For non-blocking behaviour, set --retry-count=0 and --retry-delay=0.
    For infinitely blocking behaviour with retries every second, set --retry-count=-1 and --retry-delay=1000.
    """)
    parser_lock.set_defaults(func=lock)
    parser_lock.add_argument("--retry-count", type=int, default=3, help="Number of retries")
    parser_lock.add_argument("--retry-delay", type=int, default=200, help="Milliseconds between retries")
    parser_lock.add_argument("name", help="Lock resource name")
    parser_lock.add_argument("validity", type=int, help="Number of milliseconds the lock will be valid.")
    parser_unlock = subparsers.add_parser('unlock', help='Release a lock')
    parser_unlock.set_defaults(func=unlock)
    parser_unlock.add_argument("name", help="Lock resource name")
    parser_unlock.add_argument("key", help="Result returned by a prior 'lock' command")
    args = parser.parse_args()
    # BUG FIX: on Python 3 subparsers are optional by default, so running with
    # no command used to crash with AttributeError on args.func instead of
    # exiting 2 as the epilog promises.  parser.error() exits with code 2.
    if not hasattr(args, "func"):
        parser.error("a command is required (lock or unlock)")
    log.quiet = args.quiet
    if not args.redis:
        args.redis = ["redis://localhost:6379/0"]
    # vars(args) also carries 'func'/'quiet'/etc.; lock/unlock absorb the
    # extras through their **kwargs.
    result = args.func(**vars(args))
    sys.exit(result)
# Allow running the CLI directly (e.g. ``python cli.py``) as well as via the
# installed console-script entry point.
if __name__ == "__main__":
    main()
|
PypiClean
|
/resources/GovGAST1.py
|
from functools import cached_property
from pydantic import Field
from pydantic.dataclasses import dataclass
from .Base import DataclassConfig, Profile
from .TurbineGovernorDynamics import TurbineGovernorDynamics
@dataclass(config=DataclassConfig)
class GovGAST1(TurbineGovernorDynamics):
    """
    Modified single shaft gas turbine.

    mwbase: Base for power values (MWbase) (> 0). Unit = MW.
    r: Permanent droop (R) (> 0). Typical value = 0,04.
    t1: Governor mechanism time constant (T1) (>= 0), the natural valve
        positioning time constant for small disturbances. Typical value = 0,5.
    t2: Turbine power time constant (T2) (>= 0), delay due to internal energy
        storage of the gas turbine engine. Typical value = 0,5.
    t3: Turbine exhaust temperature time constant (T3) (>= 0), delay in the
        exhaust temperature and load limiting system. Typical value = 3.
    lmax: Ambient temperature load limit (Lmax), the turbine power output at
        the limiting exhaust gas temperature. Typical value = 1.
    kt: Temperature limiter gain (Kt). Typical value = 3.
    vmax: Maximum turbine power, PU of MWbase (Vmax) (> vmin). Typical value = 1.
    vmin: Minimum turbine power, PU of MWbase (Vmin) (< vmax). Typical value = 0.
    fidle: Fuel flow at zero power output (Fidle). Typical value = 0,18.
    rmax: Maximum fuel valve opening rate (Rmax). Unit = PU / s. Typical value = 1.
    loadinc: Valve position change allowed at fast rate (Loadinc). Typical value = 0,05.
    tltr: Valve position averaging time constant (Tltr) (>= 0). Typical value = 10.
    ltrate: Maximum long term fuel valve opening rate (Ltrate). Typical value = 0,02.
    a: Turbine power time constant numerator scale factor (a). Typical value = 0,8.
    b: Turbine power time constant denominator scale factor (b) (> 0). Typical value = 1.
    db1: Intentional dead-band width (db1). Unit = Hz. Typical value = 0.
    eps: Intentional db hysteresis (eps). Unit = Hz. Typical value = 0.
    db2: Unintentional dead-band (db2). Unit = MW. Typical value = 0.
    gv1..gv6: Nonlinear gain points 1-6, PU gv (Gv1..Gv6). Typical value = 0.
    pgv1..pgv6: Nonlinear gain points 1-6, PU power (Pgv1..Pgv6). Typical value = 0.
    ka: Governor gain (Ka). Typical value = 0.
    t4: Governor lead time constant (T4) (>= 0). Typical value = 0.
    t5: Governor lag time constant (T5) (>= 0). If = 0, the entire gain and
        lead-lag block is bypassed. Typical value = 0.
    """

    # Every parameter below is exchanged only in the dynamics (DY) profile.
    mwbase: float = Field(default=0.0, in_profiles=[Profile.DY])
    r: float = Field(default=0.0, in_profiles=[Profile.DY])
    t1: int = Field(default=0, in_profiles=[Profile.DY])
    t2: int = Field(default=0, in_profiles=[Profile.DY])
    t3: int = Field(default=0, in_profiles=[Profile.DY])
    lmax: float = Field(default=0.0, in_profiles=[Profile.DY])
    kt: float = Field(default=0.0, in_profiles=[Profile.DY])
    vmax: float = Field(default=0.0, in_profiles=[Profile.DY])
    vmin: float = Field(default=0.0, in_profiles=[Profile.DY])
    fidle: float = Field(default=0.0, in_profiles=[Profile.DY])
    rmax: float = Field(default=0.0, in_profiles=[Profile.DY])
    loadinc: float = Field(default=0.0, in_profiles=[Profile.DY])
    tltr: int = Field(default=0, in_profiles=[Profile.DY])
    ltrate: float = Field(default=0.0, in_profiles=[Profile.DY])
    a: float = Field(default=0.0, in_profiles=[Profile.DY])
    b: float = Field(default=0.0, in_profiles=[Profile.DY])
    db1: float = Field(default=0.0, in_profiles=[Profile.DY])
    eps: float = Field(default=0.0, in_profiles=[Profile.DY])
    db2: float = Field(default=0.0, in_profiles=[Profile.DY])
    gv1: float = Field(default=0.0, in_profiles=[Profile.DY])
    pgv1: float = Field(default=0.0, in_profiles=[Profile.DY])
    gv2: float = Field(default=0.0, in_profiles=[Profile.DY])
    pgv2: float = Field(default=0.0, in_profiles=[Profile.DY])
    gv3: float = Field(default=0.0, in_profiles=[Profile.DY])
    pgv3: float = Field(default=0.0, in_profiles=[Profile.DY])
    gv4: float = Field(default=0.0, in_profiles=[Profile.DY])
    pgv4: float = Field(default=0.0, in_profiles=[Profile.DY])
    gv5: float = Field(default=0.0, in_profiles=[Profile.DY])
    pgv5: float = Field(default=0.0, in_profiles=[Profile.DY])
    gv6: float = Field(default=0.0, in_profiles=[Profile.DY])
    pgv6: float = Field(default=0.0, in_profiles=[Profile.DY])
    ka: float = Field(default=0.0, in_profiles=[Profile.DY])
    t4: int = Field(default=0, in_profiles=[Profile.DY])
    t5: int = Field(default=0, in_profiles=[Profile.DY])

    @cached_property
    def possible_profiles(self) -> set[Profile]:
        """
        A resource can be used by multiple profiles. This is the set of
        profiles where this element can be found.
        """
        return {Profile.DY}
|
PypiClean
|
/dagster_pandas-1.0.5-py3-none-any.whl/dagster_pandas/validation.py
|
from dagster_pandas.constraints import (
CategoricalColumnConstraint,
ColumnDTypeFnConstraint,
ColumnDTypeInSetConstraint,
Constraint,
ConstraintViolationException,
DataFrameConstraint,
InRangeColumnConstraint,
NonNullableColumnConstraint,
UniqueColumnConstraint,
)
from pandas import DataFrame, Timestamp
from pandas.core.dtypes.common import (
is_bool_dtype,
is_float_dtype,
is_integer_dtype,
is_numeric_dtype,
is_string_dtype,
)
from dagster import DagsterInvariantViolationError
from dagster import _check as check
# Pandas dtype names this module treats as numeric when validating columns.
PANDAS_NUMERIC_TYPES = {"int64", "float"}
def _construct_keyword_constraints(non_nullable, unique, ignore_missing_vals):
    """Translate the keyword flags shared by the PandasColumn constructors
    into a list of Constraint objects.

    Args:
        non_nullable (bool): Add a NonNullableColumnConstraint.
        unique (bool): Add a UniqueColumnConstraint.
        ignore_missing_vals (bool): Forwarded to the uniqueness constraint so
            it only evaluates non-null data.  Mutually exclusive with
            ``non_nullable``.

    Raises:
        DagsterInvariantViolationError: If both ``non_nullable`` and
            ``ignore_missing_vals`` are set.
    """
    # BUG FIX: the reported parameter name was "exists" (a copy/paste
    # leftover), which produced a misleading check-failure message.
    non_nullable = check.bool_param(non_nullable, "non_nullable")
    unique = check.bool_param(unique, "unique")
    ignore_missing_vals = check.bool_param(ignore_missing_vals, "ignore_missing_vals")
    if non_nullable and ignore_missing_vals:
        raise DagsterInvariantViolationError(
            "PandasColumn cannot have a non-null constraint while also ignore missing values"
        )
    constraints = []
    if non_nullable:
        constraints.append(NonNullableColumnConstraint())
    if unique:
        constraints.append(UniqueColumnConstraint(ignore_missing_vals=ignore_missing_vals))
    return constraints
class PandasColumn:
"""
The main API for expressing column level schemas and constraints for your custom dataframe
types.
Args:
name (str): Name of the column. This must match up with the column name in the dataframe you
expect to receive.
is_required (Optional[bool]): Flag indicating the optional/required presence of the column.
If the column exists, the validate function will validate the column. Defaults to True.
constraints (Optional[List[Constraint]]): List of constraint objects that indicate the
validation rules for the pandas column.
"""
def __init__(self, name, constraints=None, is_required=None):
    """Store the validated column schema.

    Args:
        name (str): Column name; must match a column in validated dataframes.
        constraints (Optional[List[Constraint]]): Validation rules for the column.
        is_required (Optional[bool]): Whether the column must be present.
            Defaults to True.
    """
    self.name = check.str_param(name, "name")
    self.is_required = check.opt_bool_param(is_required, "is_required", default=True)
    self.constraints = check.opt_list_param(constraints, "constraints", of_type=Constraint)
def validate(self, dataframe):
    """Validate ``dataframe`` against this column's schema.

    If the column is present, every configured constraint is applied to it.
    A missing column is an error only when the column is required; an
    optional missing column is silently skipped.

    Raises:
        ConstraintViolationException: If a required column is absent (and
            whatever the individual constraints raise on violation).
    """
    if self.name in dataframe.columns:
        for column_constraint in self.constraints:
            column_constraint.validate(dataframe, self.name)
    elif self.is_required:
        raise ConstraintViolationException(
            "Required column {column_name} not in dataframe with columns {dataframe_columns}".format(
                column_name=self.name, dataframe_columns=dataframe.columns
            )
        )
@staticmethod
def exists(name, non_nullable=False, unique=False, ignore_missing_vals=False, is_required=None):
    """
    Simple constructor for PandasColumns that expresses existence constraints.

    Args:
        name (str): Name of the column; must match the dataframe column name.
        non_nullable (Optional[bool]): If true, enforce that every value in
            the column is non-null.
        unique (Optional[bool]): If true, enforce a uniqueness constraint on
            the column values.
        ignore_missing_vals (Optional[bool]): If true, constraints only
            evaluate non-null data.  Cannot be combined with ``non_nullable``.
        is_required (Optional[bool]): Whether the column must be present.
            Defaults to True.
    """
    keyword_constraints = _construct_keyword_constraints(
        non_nullable=non_nullable,
        unique=unique,
        ignore_missing_vals=ignore_missing_vals,
    )
    return PandasColumn(
        name=check.str_param(name, "name"),
        constraints=keyword_constraints,
        is_required=is_required,
    )
@staticmethod
def boolean_column(
name, non_nullable=False, unique=False, ignore_missing_vals=False, is_required=None
):
"""
Simple constructor for PandasColumns that expresses boolean constraints on boolean dtypes.
Args:
name (str): Name of the column. This must match up with the column name in the dataframe you
expect to receive.
non_nullable (Optional[bool]): If true, this column will enforce a constraint that all values in the column
ought to be non null values.
unique (Optional[bool]): If true, this column will enforce a uniqueness constraint on the column values.
ignore_missing_vals (Optional[bool]): A flag that is passed into most constraints. If true, the constraint will
only evaluate non-null data. Ignore_missing_vals and non_nullable cannot both be True.
is_required (Optional[bool]): Flag indicating the optional/required presence of the column.
If the column exists the validate function will validate the column. Default to True.
"""
return PandasColumn(
name=check.str_param(name, "name"),
constraints=[ColumnDTypeFnConstraint(is_bool_dtype)]
+ _construct_keyword_constraints(
non_nullable=non_nullable, unique=unique, ignore_missing_vals=ignore_missing_vals
),
is_required=is_required,
)
@staticmethod
def numeric_column(
name,
min_value=-float("inf"),
max_value=float("inf"),
non_nullable=False,
unique=False,
ignore_missing_vals=False,
is_required=None,
):
"""
Simple constructor for PandasColumns that expresses numeric constraints numeric dtypes.
Args:
name (str): Name of the column. This must match up with the column name in the dataframe you
expect to receive.
min_value (Optional[Union[int,float]]): The lower bound for values you expect in this column. Defaults to -float('inf')
max_value (Optional[Union[int,float]]): The upper bound for values you expect in this column. Defaults to float('inf')
non_nullable (Optional[bool]): If true, this column will enforce a constraint that all values in the column
ought to be non null values.
unique (Optional[bool]): If true, this column will enforce a uniqueness constraint on the column values.
ignore_missing_vals (Optional[bool]): A flag that is passed into most constraints. If true, the constraint will
only evaluate non-null data. Ignore_missing_vals and non_nullable cannot both be True.
is_required (Optional[bool]): Flag indicating the optional/required presence of the column.
If the column exists the validate function will validate the column. Default to True.
"""
return PandasColumn(
name=check.str_param(name, "name"),
constraints=[
ColumnDTypeFnConstraint(is_numeric_dtype),
InRangeColumnConstraint(
check.numeric_param(min_value, "min_value"),
check.numeric_param(max_value, "max_value"),
ignore_missing_vals=ignore_missing_vals,
),
]
+ _construct_keyword_constraints(
non_nullable=non_nullable, unique=unique, ignore_missing_vals=ignore_missing_vals
),
is_required=is_required,
)
@staticmethod
def integer_column(
name,
min_value=-float("inf"),
max_value=float("inf"),
non_nullable=False,
unique=False,
ignore_missing_vals=False,
is_required=None,
):
"""
Simple constructor for PandasColumns that expresses numeric constraints on integer dtypes.
Args:
name (str): Name of the column. This must match up with the column name in the dataframe you
expect to receive.
min_value (Optional[Union[int,float]]): The lower bound for values you expect in this column. Defaults to -float('inf')
max_value (Optional[Union[int,float]]): The upper bound for values you expect in this column. Defaults to float('inf')
non_nullable (Optional[bool]): If true, this column will enforce a constraint that all values in the column
ought to be non null values.
unique (Optional[bool]): If true, this column will enforce a uniqueness constraint on the column values.
ignore_missing_vals (Optional[bool]): A flag that is passed into most constraints. If true, the constraint will
only evaluate non-null data. Ignore_missing_vals and non_nullable cannot both be True.
is_required (Optional[bool]): Flag indicating the optional/required presence of the column.
If the column exists the validate function will validate the column. Default to True.
"""
return PandasColumn(
name=check.str_param(name, "name"),
constraints=[
ColumnDTypeFnConstraint(is_integer_dtype),
InRangeColumnConstraint(
check.numeric_param(min_value, "min_value"),
check.numeric_param(max_value, "max_value"),
ignore_missing_vals=ignore_missing_vals,
),
]
+ _construct_keyword_constraints(
non_nullable=non_nullable, unique=unique, ignore_missing_vals=ignore_missing_vals
),
is_required=is_required,
)
@staticmethod
def float_column(
name,
min_value=-float("inf"),
max_value=float("inf"),
non_nullable=False,
unique=False,
ignore_missing_vals=False,
is_required=None,
):
"""
Simple constructor for PandasColumns that expresses numeric constraints on float dtypes.
Args:
name (str): Name of the column. This must match up with the column name in the dataframe you
expect to receive.
min_value (Optional[Union[int,float]]): The lower bound for values you expect in this column. Defaults to -float('inf')
max_value (Optional[Union[int,float]]): The upper bound for values you expect in this column. Defaults to float('inf')
non_nullable (Optional[bool]): If true, this column will enforce a constraint that all values in the column
ought to be non null values.
unique (Optional[bool]): If true, this column will enforce a uniqueness constraint on the column values.
ignore_missing_vals (Optional[bool]): A flag that is passed into most constraints. If true, the constraint will
only evaluate non-null data. Ignore_missing_vals and non_nullable cannot both be True.
is_required (Optional[bool]): Flag indicating the optional/required presence of the column.
If the column exists the validate function will validate the column. Default to True.
"""
return PandasColumn(
name=check.str_param(name, "name"),
constraints=[
ColumnDTypeFnConstraint(is_float_dtype),
InRangeColumnConstraint(
check.numeric_param(min_value, "min_value"),
check.numeric_param(max_value, "max_value"),
ignore_missing_vals=ignore_missing_vals,
),
]
+ _construct_keyword_constraints(
non_nullable=non_nullable, unique=unique, ignore_missing_vals=ignore_missing_vals
),
is_required=is_required,
)
    @staticmethod
    def datetime_column(
        name,
        min_datetime=Timestamp.min,
        max_datetime=Timestamp.max,
        non_nullable=False,
        unique=False,
        ignore_missing_vals=False,
        is_required=None,
        tz=None,
    ):
        """
        Simple constructor for PandasColumns that expresses datetime constraints on 'datetime64[ns]' dtypes.
        Args:
            name (str): Name of the column. This must match up with the column name in the dataframe you
                expect to receive.
            min_datetime (Optional[Union[int,float]]): The lower bound for values you expect in this column.
                Defaults to pandas.Timestamp.min.
            max_datetime (Optional[Union[int,float]]): The upper bound for values you expect in this column.
                Defaults to pandas.Timestamp.max.
            non_nullable (Optional[bool]): If true, this column will enforce a constraint that all values in the column
                ought to be non null values.
            unique (Optional[bool]): If true, this column will enforce a uniqueness constraint on the column values.
            ignore_missing_vals (Optional[bool]): A flag that is passed into most constraints. If true, the constraint will
                only evaluate non-null data. Ignore_missing_vals and non_nullable cannot both be True.
            is_required (Optional[bool]): Flag indicating the optional/required presence of the column.
                If the column exists the validate function will validate the column. Defaults to True.
            tz (Optional[str]): Required timezone for values eg: tz='UTC', tz='Europe/Dublin', tz='US/Eastern'.
                Defaults to None, meaning naive datetime values.
        """
        if tz is None:
            # Naive datetimes use the plain nanosecond dtype.
            datetime_constraint = ColumnDTypeInSetConstraint({"datetime64[ns]"})
        else:
            # Timezone-aware dtype strings embed the tz name, e.g. 'datetime64[ns, UTC]'.
            datetime_constraint = ColumnDTypeInSetConstraint({f"datetime64[ns, {tz}]"})
            # One day more/less than absolute min/max to prevent OutOfBoundsDatetime errors when converting min/max to be tz aware
            if min_datetime.tz_localize(None) == Timestamp.min:
                min_datetime = Timestamp("1677-09-22 00:12:43.145225Z")
            if max_datetime.tz_localize(None) == Timestamp.max:
                max_datetime = Timestamp("2262-04-10 23:47:16.854775807Z")
            # Convert bounds to same tz so comparisons against column values are valid.
            if Timestamp(min_datetime).tz is None:
                min_datetime = Timestamp(min_datetime).tz_localize(tz)
            if Timestamp(max_datetime).tz is None:
                max_datetime = Timestamp(max_datetime).tz_localize(tz)
        return PandasColumn(
            name=check.str_param(name, "name"),
            constraints=[
                datetime_constraint,
                InRangeColumnConstraint(
                    min_datetime, max_datetime, ignore_missing_vals=ignore_missing_vals
                ),
            ]
            + _construct_keyword_constraints(
                non_nullable=non_nullable, unique=unique, ignore_missing_vals=ignore_missing_vals
            ),
            is_required=is_required,
        )
@staticmethod
def string_column(
name, non_nullable=False, unique=False, ignore_missing_vals=False, is_required=None
):
"""
Simple constructor for PandasColumns that expresses constraints on string dtypes.
Args:
name (str): Name of the column. This must match up with the column name in the dataframe you
expect to receive.
non_nullable (Optional[bool]): If true, this column will enforce a constraint that all values in the column
ought to be non null values.
unique (Optional[bool]): If true, this column will enforce a uniqueness constraint on the column values.
ignore_missing_vals (Optional[bool]): A flag that is passed into most constraints. If true, the constraint will
only evaluate non-null data. Ignore_missing_vals and non_nullable cannot both be True.
is_required (Optional[bool]): Flag indicating the optional/required presence of the column.
If the column exists the validate function will validate the column. Default to True.
"""
return PandasColumn(
name=check.str_param(name, "name"),
constraints=[ColumnDTypeFnConstraint(is_string_dtype)]
+ _construct_keyword_constraints(
non_nullable=non_nullable, unique=unique, ignore_missing_vals=ignore_missing_vals
),
is_required=is_required,
)
@staticmethod
def categorical_column(
name,
categories,
of_types=frozenset({"category", "object"}),
non_nullable=False,
unique=False,
ignore_missing_vals=False,
is_required=None,
):
"""
Simple constructor for PandasColumns that expresses categorical constraints on specified dtypes.
Args:
name (str): Name of the column. This must match up with the column name in the dataframe you
expect to receive.
categories (List[Any]): The valid set of buckets that all values in the column must match.
of_types (Optional[Union[str, Set[str]]]): The expected dtype[s] that your categories and values must
abide by.
non_nullable (Optional[bool]): If true, this column will enforce a constraint that all values in
the column ought to be non null values.
unique (Optional[bool]): If true, this column will enforce a uniqueness constraint on the column values.
ignore_missing_vals (Optional[bool]): A flag that is passed into most constraints. If true, the
constraint will only evaluate non-null data. Ignore_missing_vals and non_nullable cannot both be True.
is_required (Optional[bool]): Flag indicating the optional/required presence of the column.
If the column exists the validate function will validate the column. Default to True.
"""
of_types = {of_types} if isinstance(of_types, str) else of_types
return PandasColumn(
name=check.str_param(name, "name"),
constraints=[
ColumnDTypeInSetConstraint(of_types),
CategoricalColumnConstraint(categories, ignore_missing_vals=ignore_missing_vals),
]
+ _construct_keyword_constraints(
non_nullable=non_nullable, unique=unique, ignore_missing_vals=ignore_missing_vals
),
is_required=is_required,
)
def validate_constraints(dataframe, pandas_columns=None, dataframe_constraints=None):
    """Validate a dataframe against column-level and dataframe-level constraints.

    Args:
        dataframe (DataFrame): The pandas dataframe to validate.
        pandas_columns (Optional[List[PandasColumn]]): Column specs; each is validated
            against its matching column in the dataframe.
        dataframe_constraints (Optional[List[DataFrameConstraint]]): Constraints
            applied to the dataframe as a whole.

    Raises:
        ConstraintViolationException: If any column spec or dataframe constraint
            is violated.
    """
    dataframe = check.inst_param(dataframe, "dataframe", DataFrame)
    # Bug fix: the parameter name reported by the checker previously said
    # "column_constraints", which produced a misleading error message for
    # callers who passed a bad value for `pandas_columns`.
    pandas_columns = check.opt_list_param(pandas_columns, "pandas_columns", of_type=PandasColumn)
    dataframe_constraints = check.opt_list_param(
        dataframe_constraints, "dataframe_constraints", of_type=DataFrameConstraint
    )
    # opt_list_param always yields a list, so empty input simply skips the loops.
    for column in pandas_columns:
        column.validate(dataframe)
    for dataframe_constraint in dataframe_constraints:
        dataframe_constraint.validate(dataframe)
|
PypiClean
|
/glibc-0.6.1.tar.gz/glibc-0.6.1/pyglibc/select.py
|
from __future__ import absolute_import
from ctypes import POINTER
from ctypes import byref
from ctypes import cast
from errno import EBADF
from threading import Lock
from glibc import EPOLLERR
from glibc import EPOLLET
from glibc import EPOLLHUP
from glibc import EPOLLIN
from glibc import EPOLLMSG
from glibc import EPOLLONESHOT
from glibc import EPOLLOUT
from glibc import EPOLLPRI
from glibc import EPOLLRDBAND
from glibc import EPOLLRDHUP
from glibc import EPOLLRDNORM
from glibc import EPOLLWRBAND
from glibc import EPOLLWRNORM
from glibc import EPOLL_CLOEXEC
from glibc import EPOLL_CTL_ADD
from glibc import EPOLL_CTL_DEL
from glibc import EPOLL_CTL_MOD
from glibc import FD_SETSIZE
from glibc import close
from glibc import epoll_create1
from glibc import epoll_ctl
from glibc import epoll_event
from glibc import epoll_wait
# Module metadata.
__author__ = 'Zygmunt Krynicki <[email protected]>'
__version__ = '1.0' # Let's claim this is complete and fix issues, if any
# Public API: the epoll class plus the event-mask constants, mirroring the
# names exported by Python 3.4's select module.
__all__ = ['epoll', 'EPOLL_CLOEXEC', 'EPOLLIN', 'EPOLLOUT', 'EPOLLPRI',
           'EPOLLERR', 'EPOLLHUP', 'EPOLLET', 'EPOLLONESHOT', 'EPOLLRDNORM',
           'EPOLLRDBAND', 'EPOLLWRNORM', 'EPOLLWRBAND', 'EPOLLMSG']
# NOTE: Extra features not present in Python 3.4
__all__ += ['EPOLLRDHUP']
def _err_closed():
raise ValueError("I/O operation on closed epoll object")
class epoll(object):
    """
    Pure-python reimplementation of :class:`select.epoll` from Python 3.4
    compatible with Python 2.7+.
    """

    # Somewhat inefficient lock acquired on each call to epoll.close() to
    # ensure that we match semantics from python stdlib where close can be
    # called concurrently.
    _close_lock = Lock()

    def __init__(self, sizehint=-1, flags=0):
        """
        :param sizehint:
            Dummy argument for compatibility with select.epoll, ignored.
        :param flags:
            Flags passed to ``epoll_create1()``. Note that internally flags are
            always OR-ed with EPOLL_CLOEXEC, matching what Python 3.4 does, so
            passing 0 is perfectly fine.
        """
        # Assign -1 first so the object is in a consistent "closed" state
        # if epoll_create1() raises.
        self._epfd = -1
        self._epfd = epoll_create1(flags | EPOLL_CLOEXEC)

    def __enter__(self):
        """
        Enter a context manager

        :returns:
            self
        :raises ValueError:
            If :meth:`closed()` is True
        """
        if self._epfd < 0:
            _err_closed()
        return self

    def __exit__(self, *args):
        """
        Exit a context manager

        This method calls :meth:`close()`.
        """
        self.close()

    def close(self):
        """
        Close the internal epoll file descriptor if it isn't closed

        :raises OSError:
            If the underlying ``close(2)`` fails. The error message matches
            those found in the manual page.
        """
        with self._close_lock:
            # Read-and-invalidate under the lock so exactly one concurrent
            # caller performs the actual close(2).
            epfd = self._epfd
            if epfd >= 0:
                self._epfd = -1
                close(epfd)

    @property
    def closed(self):
        """
        property indicating if the internal epoll descriptor was closed
        """
        return self._epfd < 0

    def fileno(self):
        """
        Get the descriptor number obtained from ``epoll_create1()(2)``

        :returns:
            The descriptor number
        :raises ValueError:
            If :meth:`closed()` is True
        """
        if self._epfd < 0:
            _err_closed()
        return self._epfd

    @classmethod
    def fromfd(cls, fd):
        """
        Create a new epoll object from a given file descriptor

        :param fd:
            A pre-made file descriptor obtained from ``epoll_create(2)`` or
            ``epoll_create1(2)``
        :raises ValueError:
            If fd is not a valid file descriptor
        :returns:
            A new epoll object

        .. note::
            If the passed descriptor is incorrect then various methods will
            fail and raise OSError with an appropriate message.
        """
        if fd < 0:
            _err_closed()
        # Bug fix: ``cls.__new__()`` must be passed the class; the previous
        # code called it with no arguments, which raised TypeError and made
        # fromfd() unusable. __init__ is deliberately bypassed so we adopt
        # the caller's descriptor instead of creating a new one.
        self = cls.__new__(cls)
        self._epfd = fd
        return self

    def register(self, fd, eventmask=None):
        """
        Register a new descriptor

        :param fd:
            The descriptor to register.
        :param eventmask:
            Bit-mask of events that will be monitored. By default EPOLLIN,
            EPOLLOUT and EPOLLPRI are used. Note that EPOLLHUP is implicit and
            doesn't need to be provided.
        :raises ValueError:
            If :meth:`closed()` is True
        :raises OSError:
            If the underlying ``epoll_ctl(2)`` fails. The error message matches
            those found in the manual page.
        """
        if self._epfd < 0:
            _err_closed()
        if eventmask is None:
            eventmask = EPOLLIN | EPOLLOUT | EPOLLPRI
        ev = epoll_event()
        ev.events = eventmask
        ev.data.fd = fd
        epoll_ctl(self._epfd, EPOLL_CTL_ADD, fd, byref(ev))

    def unregister(self, fd):
        """
        Unregister a previously registered descriptor

        :param fd:
            The descriptor to unregister
        :raises ValueError:
            If :meth:`closed()` is True
        :raises OSError:
            If the underlying ``epoll_ctl(2)`` fails. The error message matches
            those found in the manual page.

        .. note::
            For feature parity with Python 3.4, unlike what ``epoll_ctl(2)``
            would do, we are silently ignoring ``EBADF`` which is raised if
            the file descriptor was already closed.
        """
        if self._epfd < 0:
            _err_closed()
        ev = epoll_event()
        try:
            epoll_ctl(self._epfd, EPOLL_CTL_DEL, fd, byref(ev))
        except OSError as exc:
            # Allow fd to be closed, matching Python 3.4
            if exc.errno != EBADF:
                raise

    def modify(self, fd, eventmask):
        """
        Change the bit-mask of events associated with a previously-registered
        descriptor.

        :param fd:
            The descriptor to modify.
        :param eventmask:
            New bit-mask of events that will be monitored.
        :raises ValueError:
            If :meth:`closed()` is True
        :raises OSError:
            If the underlying ``epoll_ctl(2)`` fails. The error message matches
            those found in the manual page.
        """
        if self._epfd < 0:
            _err_closed()
        ev = epoll_event()
        ev.events = eventmask
        ev.data.fd = fd
        epoll_ctl(self._epfd, EPOLL_CTL_MOD, fd, byref(ev))

    def poll(self, timeout=-1, maxevents=-1):
        """
        Poll for events

        :param timeout:
            The amount of seconds to wait for events before giving up. The
            default value, -1, represents infinity. Note that unlike the
            underlying ``epoll_wait()`` timeout is a fractional number
            representing **seconds**.
        :param maxevents:
            The maximum number of events to report. The default is a
            reasonably-sized maximum, identical to the one selected by
            Python 3.4.
        :returns:
            A list of (fd, events) that were reported or an empty list if the
            timeout elapsed.
        :raises ValueError:
            If :meth:`closed()` is True
        :raises OSError:
            If the underlying ``epoll_wait(2)`` fails. The error message
            matches those found in the manual page.
        """
        if self._epfd < 0:
            _err_closed()
        if timeout != -1:
            # 1000 because epoll_wait(2) uses milliseconds
            timeout = int(timeout * 1000)
        if maxevents == -1:
            maxevents = FD_SETSIZE - 1
        # Stack-allocate a C array of epoll_event for the kernel to fill.
        events = (epoll_event * maxevents)()
        num_events = epoll_wait(
            self._epfd, cast(byref(events), POINTER(epoll_event)),
            maxevents, timeout)
        return [(events[i].data.fd, events[i].events)
                for i in range(num_events)]
|
PypiClean
|
/pytorch_forecasting-1.0.0-py3-none-any.whl/pytorch_forecasting/metrics/_mqf2_utils.py
|
from typing import List, Optional, Tuple
from cpflows.flows import DeepConvexFlow, SequentialFlow
import torch
from torch.distributions import AffineTransform, Distribution, Normal, TransformedDistribution
import torch.nn.functional as F
class DeepConvexNet(DeepConvexFlow):
    r"""
    Class that takes a partially input convex neural network (picnn)
    as input and equips it with functions of logdet
    computation (both estimation and exact computation)

    This class is based on DeepConvexFlow of the CP-Flow
    repo (https://github.com/CW-Huang/CP-Flow)

    For details of the logdet estimator, see
    ``Convex potential flows: Universal probability distributions
    with optimal transport and convex optimization``

    Parameters
    ----------
    picnn
        A partially input convex neural network (picnn)
    dim
        Dimension of the input
    is_energy_score
        Indicates if energy score is used as the objective function
        If yes, the network is not required to be strictly convex,
        so we can just use the picnn
        otherwise, a quadratic term is added to the output of picnn
        to render it strictly convex
    estimate_logdet
        If True, use the stochastic (Lanczos/CG) logdet estimator in
        forward_transform; otherwise compute logdet exactly
    m1
        Dimension of the Krylov subspace of the Lanczos tridiagonalization
        used in approximating H of logdet(H)
    m2
        Iteration number of the conjugate gradient algorithm
        used to approximate logdet(H)
    rtol
        relative tolerance of the conjugate gradient algorithm
    atol
        absolute tolerance of the conjugate gradient algorithm
    """
    def __init__(
        self,
        picnn: torch.nn.Module,
        dim: int,
        is_energy_score: bool = False,
        estimate_logdet: bool = False,
        m1: int = 10,
        m2: Optional[int] = None,
        rtol: float = 0.0,
        atol: float = 1e-3,
    ) -> None:
        super().__init__(
            picnn,
            dim,
            m1=m1,
            m2=m2,
            rtol=rtol,
            atol=atol,
        )
        # Alias the base class's ``icnn`` attribute under the name used in
        # the MQF2 paper/code.
        self.picnn = self.icnn
        self.is_energy_score = is_energy_score
        self.estimate_logdet = estimate_logdet
    def get_potential(self, x: torch.Tensor, context: Optional[torch.Tensor] = None) -> torch.Tensor:
        """Return the convex potential of ``x`` conditioned on ``context``."""
        n = x.size(0)
        output = self.picnn(x, context)
        if self.is_energy_score:
            # Energy-score training does not need strict convexity, so the raw
            # picnn output is used directly.
            return output
        else:
            # Add a softplus-weighted quadratic term (w0, w1 are base-class
            # parameters) so the potential is strictly convex for MLE training.
            return F.softplus(self.w1) * output + F.softplus(self.w0) * (x.view(n, -1) ** 2).sum(1, keepdim=True) / 2
    def forward_transform(
        self,
        x: torch.Tensor,
        logdet: Optional[torch.Tensor] = 0,
        context: Optional[torch.Tensor] = None,
        extra: Optional[torch.Tensor] = None,
    ) -> Tuple[torch.Tensor, torch.Tensor]:
        """Apply the flow, dispatching between the stochastic logdet estimator
        and the exact (brute-force) computation based on ``estimate_logdet``."""
        if self.estimate_logdet:
            return self.forward_transform_stochastic(x, logdet, context=context, extra=extra)
        else:
            return self.forward_transform_bruteforce(x, logdet, context=context)
class SequentialNet(SequentialFlow):
    r"""
    Class that combines a list of DeepConvexNet and ActNorm
    layers and provides energy score computation

    This class is based on SequentialFlow of the CP-Flow repo
    (https://github.com/CW-Huang/CP-Flow)

    Parameters
    ----------
    networks
        list of DeepConvexNet and/or ActNorm instances
    """
    def __init__(self, networks: List[torch.nn.Module]) -> None:
        super().__init__(networks)
        # Alias the base class's ``flows`` attribute under a clearer name.
        self.networks = self.flows
    def forward(self, x: torch.Tensor, context: Optional[torch.Tensor] = None) -> torch.Tensor:
        """Apply each layer in order; only DeepConvexNet layers receive the context."""
        for network in self.networks:
            if isinstance(network, DeepConvexNet):
                x = network.forward(x, context=context)
            else:
                # Non-conditional layers (e.g. ActNorm) take no context.
                x = network.forward(x)
        return x
    def es_sample(self, hidden_state: torch.Tensor, dimension: int) -> torch.Tensor:
        """
        Auxiliary function for energy score computation

        Drawing samples conditioned on the hidden state

        Parameters
        ----------
        hidden_state
            hidden_state which the samples conditioned
            on (num_samples, hidden_size)
        dimension
            dimension of the input

        Returns
        -------
        samples
            samples drawn (num_samples, dimension)
        """
        num_samples = hidden_state.shape[0]
        # Standard normal built on the same dtype/device as the hidden state.
        zero = torch.tensor(0, dtype=hidden_state.dtype, device=hidden_state.device)
        one = torch.ones_like(zero)
        standard_normal = Normal(zero, one)
        # Push standard-normal noise through the flow, conditioned on the
        # hidden state, to obtain samples from the modeled distribution.
        samples = self.forward(
            standard_normal.sample([num_samples * dimension]).view(num_samples, dimension),
            context=hidden_state,
        )
        return samples
    def energy_score(
        self,
        z: torch.Tensor,
        hidden_state: torch.Tensor,
        es_num_samples: int = 50,
        beta: float = 1.0,
    ) -> torch.Tensor:
        """
        Computes the (approximated) energy score sum_i ES(g,z_i),
        where ES(g,z_i) =
        -1/(2*es_num_samples^2) * sum_{w,w'} ||w-w'||_2^beta
        + 1/es_num_samples * sum_{w''} ||w''-z_i||_2^beta,
        w's are samples drawn from the
        quantile function g(., h_i) (gradient of picnn),
        h_i is the hidden state associated with z_i,
        and es_num_samples is the number of samples drawn
        for each of w, w', w'' in energy score approximation

        Parameters
        ----------
        z
            Observations (numel_batch, dimension)
        hidden_state
            Hidden state (numel_batch, hidden_size)
        es_num_samples
            Number of samples drawn for each of w, w', w''
            in energy score approximation
        beta
            Hyperparameter of the energy score, see the formula above

        Returns
        -------
        loss
            energy score (numel_batch)
        """
        numel_batch, dimension = z.shape[0], z.shape[1]
        # (numel_batch * dimension * es_num_samples x hidden_size)
        hidden_state_repeat = hidden_state.repeat_interleave(repeats=es_num_samples, dim=0)
        # Two independent sample sets w and w' for the pairwise term.
        w = self.es_sample(hidden_state_repeat, dimension)
        w_prime = self.es_sample(hidden_state_repeat, dimension)
        # Pairwise distances ||w - w'|| over all es_num_samples^2 sample pairs,
        # computed via broadcasting of the two reshaped sample tensors.
        first_term = (
            torch.norm(
                w.view(numel_batch, 1, es_num_samples, dimension)
                - w_prime.view(numel_batch, es_num_samples, 1, dimension),
                dim=-1,
            )
            ** beta
        )
        mean_first_term = torch.mean(first_term.view(numel_batch, -1), dim=-1)
        # since both tensors are huge (numel_batch*es_num_samples, dimension),
        # delete to free up GPU memories
        del w, w_prime
        z_repeat = z.repeat_interleave(repeats=es_num_samples, dim=0)
        w_bar = self.es_sample(hidden_state_repeat, dimension)
        # Second term: mean distance between fresh samples w'' and the
        # (repeated) observations z.
        second_term = (
            torch.norm(
                w_bar.view(numel_batch, es_num_samples, dimension)
                - z_repeat.view(numel_batch, es_num_samples, dimension),
                dim=-1,
            )
            ** beta
        )
        mean_second_term = torch.mean(second_term.view(numel_batch, -1), dim=-1)
        # Note: mean over es_num_samples^2 pairs absorbs the 1/m^2 factor of
        # the formula; the -0.5 carries the remaining -1/2.
        loss = -0.5 * mean_first_term + mean_second_term
        return loss
class MQF2Distribution(Distribution):
    r"""
    Distribution class for the model MQF2 proposed in the paper
    ``Multivariate Quantile Function Forecaster``
    by Kan, Aubet, Januschowski, Park, Benidis, Ruthotto, Gasthaus

    Parameters
    ----------
    picnn
        A SequentialNet instance of a
        partially input convex neural network (picnn)
    hidden_state
        hidden_state obtained by unrolling the RNN encoder
        shape = (batch_size, context_length, hidden_size) in training
        shape = (batch_size, hidden_size) in inference
    prediction_length
        Length of the prediction horizon
    is_energy_score
        If True, use energy score as objective function
        otherwise use maximum likelihood as
        objective function (normalizing flows)
    es_num_samples
        Number of samples drawn to approximate the energy score
    beta
        Hyperparameter of the energy score (power of the two terms)
    threshold_input
        Clamping threshold of the (scaled) input when maximum
        likelihood is used as objective function
        this is used to make the forecaster more robust
        to outliers in training samples
    validate_args
        Sets whether validation is enabled or disabled
        For more details, refer to the descriptions in
        torch.distributions.distribution.Distribution
    """
    def __init__(
        self,
        picnn: torch.nn.Module,
        hidden_state: torch.Tensor,
        prediction_length: int,
        is_energy_score: bool = True,
        es_num_samples: int = 50,
        beta: float = 1.0,
        threshold_input: float = 100.0,
        validate_args: bool = False,
    ) -> None:
        self.picnn = picnn
        self.hidden_state = hidden_state
        self.prediction_length = prediction_length
        self.is_energy_score = is_energy_score
        self.es_num_samples = es_num_samples
        self.beta = beta
        self.threshold_input = threshold_input
        # batch_shape is derived from hidden_state (see property below), so
        # the attributes above must be set before calling super().__init__.
        super().__init__(batch_shape=self.batch_shape, validate_args=validate_args)
        # 3-D hidden state (training) carries a context_length axis;
        # 2-D hidden state (inference) implies context_length of 1.
        self.context_length = self.hidden_state.shape[-2] if len(self.hidden_state.shape) > 2 else 1
        self.numel_batch = self.get_numel(self.batch_shape)
        # mean zero and std one
        mu = torch.tensor(0, dtype=hidden_state.dtype, device=hidden_state.device)
        sigma = torch.ones_like(mu)
        self.standard_normal = Normal(mu, sigma)
    def stack_sliding_view(self, z: torch.Tensor) -> torch.Tensor:
        """
        Auxiliary function for loss computation

        Unfolds the observations by sliding a window of size prediction_length
        over the observations z
        Then, reshapes the observations into a 2-dimensional tensor for
        further computation

        Parameters
        ----------
        z
            A batch of time series with shape
            (batch_size, context_length + prediction_length - 1)

        Returns
        -------
        Tensor
            Unfolded time series with shape
            (batch_size * context_length, prediction_length)
        """
        z = z.unfold(dimension=-1, size=self.prediction_length, step=1)
        z = z.reshape(-1, z.shape[-1])
        return z
    def loss(self, z: torch.Tensor) -> torch.Tensor:
        # Dispatch to the configured training objective: energy score, or
        # negative log-likelihood for the normalizing-flow (MLE) setting.
        if self.is_energy_score:
            return self.energy_score(z)
        else:
            return -self.log_prob(z)
    def log_prob(self, z: torch.Tensor) -> torch.Tensor:
        """
        Computes the log likelihood log(g(z)) + logdet(dg(z)/dz),
        where g is the gradient of the picnn

        Parameters
        ----------
        z
            A batch of time series with shape
            (batch_size, context_length + prediction_length - 1)

        Returns
        -------
        loss
            Tensor of shape (batch_size * context_length,)
        """
        # Clamp to make training robust to outliers (see threshold_input).
        z = torch.clamp(z, min=-self.threshold_input, max=self.threshold_input)
        z = self.stack_sliding_view(z)
        loss = self.picnn.logp(z, self.hidden_state.reshape(-1, self.hidden_state.shape[-1]))
        return loss
    def energy_score(self, z: torch.Tensor) -> torch.Tensor:
        """
        Computes the (approximated) energy score sum_i ES(g,z_i),
        where ES(g,z_i) =
        -1/(2*es_num_samples^2) * sum_{w,w'} ||w-w'||_2^beta
        + 1/es_num_samples * sum_{w''} ||w''-z_i||_2^beta,
        w's are samples drawn from the
        quantile function g(., h_i) (gradient of picnn),
        h_i is the hidden state associated with z_i,
        and es_num_samples is the number of samples drawn
        for each of w, w', w'' in energy score approximation

        Parameters
        ----------
        z
            A batch of time series with shape
            (batch_size, context_length + prediction_length - 1)

        Returns
        -------
        loss
            Tensor of shape (batch_size * context_length,)
        """
        es_num_samples = self.es_num_samples
        beta = self.beta
        z = self.stack_sliding_view(z)
        reshaped_hidden_state = self.hidden_state.reshape(-1, self.hidden_state.shape[-1])
        # The heavy lifting (sampling + pairwise distances) happens in the picnn.
        loss = self.picnn.energy_score(z, reshaped_hidden_state, es_num_samples=es_num_samples, beta=beta)
        return loss
    def rsample(self, sample_shape: torch.Size = torch.Size()) -> torch.Tensor:
        """
        Generates the sample paths

        Parameters
        ----------
        sample_shape
            Shape of the samples

        Returns
        -------
        sample_paths
            Tensor of shape (batch_size, *sample_shape, prediction_length)
        """
        numel_batch = self.numel_batch
        prediction_length = self.prediction_length
        num_samples_per_batch = MQF2Distribution.get_numel(sample_shape)
        num_samples = num_samples_per_batch * numel_batch
        hidden_state_repeat = self.hidden_state.repeat_interleave(repeats=num_samples_per_batch, dim=0)
        # Draw uniform quantile levels, one per predicted time step.
        alpha = torch.rand(
            (num_samples, prediction_length),
            dtype=self.hidden_state.dtype,
            device=self.hidden_state.device,
            layout=self.hidden_state.layout,
        ).clamp(
            min=1e-4, max=1 - 1e-4
        )  # prevent numerical issues by preventing to sample beyond 0.1% and 99.9% percentiles
        samples = (
            self.quantile(alpha, hidden_state_repeat)
            .reshape((numel_batch,) + sample_shape + (prediction_length,))
            .transpose(0, 1)
        )
        return samples
    def quantile(self, alpha: torch.Tensor, hidden_state: Optional[torch.Tensor] = None) -> torch.Tensor:
        """
        Generates the predicted paths associated with the quantile levels alpha

        Parameters
        ----------
        alpha
            quantile levels,
            shape = (batch_shape, prediction_length)
        hidden_state
            hidden_state, shape = (batch_shape, hidden_size)

        Returns
        -------
        results
            predicted paths of shape = (batch_shape, prediction_length)
        """
        if hidden_state is None:
            hidden_state = self.hidden_state
        # Map uniform quantile levels to standard-normal quantiles first.
        normal_quantile = self.standard_normal.icdf(alpha)
        # In the energy score approach, we directly draw samples from picnn
        # In the MLE (Normalizing flows) approach, we need to invert the picnn
        # (go backward through the flow) to draw samples
        if self.is_energy_score:
            result = self.picnn(normal_quantile, context=hidden_state)
        else:
            result = self.picnn.reverse(normal_quantile, context=hidden_state)
        return result
    @staticmethod
    def get_numel(tensor_shape: torch.Size) -> int:
        # Auxiliary function
        # compute number of elements specified in a torch.Size()
        return torch.prod(torch.tensor(tensor_shape)).item()
    @property
    def batch_shape(self) -> torch.Size:
        # last dimension is the hidden state size
        return self.hidden_state.shape[:-1]
    @property
    def event_shape(self) -> Tuple:
        # Each event is one full prediction horizon.
        return (self.prediction_length,)
    @property
    def event_dim(self) -> int:
        # Events are 1-dimensional (a vector over the prediction horizon).
        return 1
class TransformedMQF2Distribution(TransformedDistribution):
    """MQF2Distribution wrapped with (affine) output transforms.

    Wraps a base :class:`MQF2Distribution` so that losses can be evaluated on
    observations in the transformed (e.g. unscaled) space, and quantile
    predictions are mapped back through the transforms.
    """
    def __init__(
        self,
        base_distribution: MQF2Distribution,
        transforms: List[AffineTransform],
        validate_args: bool = False,
    ) -> None:
        super().__init__(base_distribution, transforms, validate_args=validate_args)
    def scale_input(self, y: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
        # Auxiliary function to scale the observations
        scale = torch.tensor(1.0, device=y.device)
        # Invert the transforms (in reverse order) to map y back into the
        # base distribution's space.
        for t in self.transforms[::-1]:
            y = t._inverse(y)
        # Accumulate the total scale factor by pushing 1.0 forward through the
        # transforms; non-affine transforms are applied directly.
        for t in self.transforms:
            if isinstance(t, AffineTransform):
                scale = scale * t.scale
            else:
                scale = t(scale)
        return y, scale
    def repeat_scale(self, scale: torch.Tensor) -> torch.Tensor:
        # Expand the per-series scale to one entry per sliding-window slice
        # (context_length slices per series) so it aligns with the base
        # distribution's per-slice losses.
        return scale.squeeze(-1).repeat_interleave(self.base_dist.context_length, 0)
    def log_prob(self, y: torch.Tensor) -> torch.Tensor:
        prediction_length = self.base_dist.prediction_length
        z, scale = self.scale_input(y)
        p = self.base_dist.log_prob(z)
        repeated_scale = self.repeat_scale(scale)
        # the log scale term can be omitted
        # in optimization because it is a constant
        # prediction_length is the dimension of each sample
        return p - prediction_length * torch.log(repeated_scale)
    def energy_score(self, y: torch.Tensor) -> torch.Tensor:
        beta = self.base_dist.beta
        z, scale = self.scale_input(y)
        loss = self.base_dist.energy_score(z)
        repeated_scale = self.repeat_scale(scale)
        # The energy score is homogeneous of degree beta in the scale.
        return loss * (repeated_scale**beta)
    def quantile(self, alpha: torch.Tensor, hidden_state: Optional[torch.Tensor] = None) -> torch.Tensor:
        result = self.base_dist.quantile(alpha, hidden_state=hidden_state)
        # Reshape so that transforms broadcast per prediction horizon, then
        # restore the original (batch, prediction_length) layout of alpha.
        result = result.reshape(self.base_dist.hidden_state.size(0), -1, self.base_dist.prediction_length).transpose(
            0, 1
        )
        for transform in self.transforms:
            # transform separate for each prediction horizon
            result = transform(result)
        return result.transpose(0, 1).reshape_as(alpha)
|
PypiClean
|
/glance-26.0.0.0b3.tar.gz/glance-26.0.0.0b3/doc/source/admin/policies.rst
|
..
Copyright 2012 OpenStack Foundation
All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
Policies
========
.. warning::
JSON formatted policy file is deprecated since Glance 22.0.0 (Wallaby).
This `oslopolicy-convert-json-to-yaml`__ tool will migrate your existing
JSON-formatted policy file to YAML in a backward-compatible way.
.. __: https://docs.openstack.org/oslo.policy/latest/cli/oslopolicy-convert-json-to-yaml.html
Glance's public API calls may be restricted to certain sets of users using a
policy configuration file. This document explains exactly how policies are
configured and what they apply to.
A policy is composed of a set of rules that are used by the policy "Brain" in
determining if a particular action may be performed by the authorized tenant.
Constructing a Policy Configuration File
----------------------------------------
A policy configuration file is simply a JSON object that contains sets of
rules. Each top-level key is the name of a rule. Each rule
is a string that describes an action that may be performed in the Glance API.
The actions that may have a rule enforced on them are:
* ``get_images`` - List available image entities
* ``GET /v1/images``
* ``GET /v1/images/detail``
* ``GET /v2/images``
* ``get_image`` - Retrieve a specific image entity
* ``HEAD /v1/images/<IMAGE_ID>``
* ``GET /v1/images/<IMAGE_ID>``
* ``GET /v2/images/<IMAGE_ID>``
* ``download_image`` - Download binary image data
* ``GET /v1/images/<IMAGE_ID>``
* ``GET /v2/images/<IMAGE_ID>/file``
* ``upload_image`` - Upload binary image data
* ``POST /v1/images``
* ``PUT /v1/images/<IMAGE_ID>``
* ``PUT /v2/images/<IMAGE_ID>/file``
* ``copy_from`` - Copy binary image data from URL
* ``POST /v1/images``
* ``PUT /v1/images/<IMAGE_ID>``
* ``add_image`` - Create an image entity
* ``POST /v1/images``
* ``POST /v2/images``
* ``modify_image`` - Update an image entity
* ``PUT /v1/images/<IMAGE_ID>``
* ``PUT /v2/images/<IMAGE_ID>``
* ``publicize_image`` - Create or update public images
* ``POST /v1/images`` with attribute ``is_public`` = ``true``
* ``PUT /v1/images/<IMAGE_ID>`` with attribute ``is_public`` = ``true``
* ``POST /v2/images`` with attribute ``visibility`` = ``public``
* ``PUT /v2/images/<IMAGE_ID>`` with attribute ``visibility`` = ``public``
* ``communitize_image`` - Create or update community images
* ``POST /v2/images`` with attribute ``visibility`` = ``community``
* ``PUT /v2/images/<IMAGE_ID>`` with attribute ``visibility`` = ``community``
* ``delete_image`` - Delete an image entity and associated binary data
* ``DELETE /v1/images/<IMAGE_ID>``
* ``DELETE /v2/images/<IMAGE_ID>``
* ``add_member`` - Add a membership to the member repo of an image
* ``POST /v2/images/<IMAGE_ID>/members``
* ``get_members`` - List the members of an image
* ``GET /v1/images/<IMAGE_ID>/members``
* ``GET /v2/images/<IMAGE_ID>/members``
* ``delete_member`` - Delete a membership of an image
* ``DELETE /v1/images/<IMAGE_ID>/members/<MEMBER_ID>``
* ``DELETE /v2/images/<IMAGE_ID>/members/<MEMBER_ID>``
* ``modify_member`` - Create or update the membership of an image
* ``PUT /v1/images/<IMAGE_ID>/members/<MEMBER_ID>``
* ``PUT /v1/images/<IMAGE_ID>/members``
* ``POST /v2/images/<IMAGE_ID>/members``
* ``PUT /v2/images/<IMAGE_ID>/members/<MEMBER_ID>``
* ``manage_image_cache`` - Allowed to use the image cache management API
To limit an action to a particular role or roles, you list the roles like so ::
{
"delete_image": ["role:admin", "role:superuser"]
}
The above would add a rule that only allowed users that had roles of either
"admin" or "superuser" to delete an image.
Writing Rules
-------------
Role checks are going to continue to work exactly as they already do. If the
role defined in the check is one that the user holds, then that will pass,
e.g., ``role:admin``.
To write a generic rule, you need to know that there are three values provided
by Glance that can be used in a rule on the left side of the colon (``:``).
Those values are the current user's credentials in the form of:
- role
- tenant
- owner
The left side of the colon can also contain any value that Python can
understand, e.g.,:
- ``True``
- ``False``
- ``"a string"``
- &c.
Using ``tenant`` and ``owner`` will only work with images. Consider the
following rule::
tenant:%(owner)s
This will use the ``tenant`` value of the currently authenticated user. It
will also use ``owner`` from the image it is acting upon. If those two
values are equivalent the check will pass. All attributes on an image (as well
as extra image properties) are available for use on the right side of the
colon. The most useful are the following:
- ``owner``
- ``protected``
- ``is_public``
Therefore, you could construct a set of rules like the following::
{
"not_protected": "False:%(protected)s",
"is_owner": "tenant:%(owner)s",
"is_owner_or_admin": "rule:is_owner or role:admin",
"not_protected_and_is_owner": "rule:not_protected and rule:is_owner",
"get_image": "rule:is_owner_or_admin",
"delete_image": "rule:not_protected_and_is_owner",
"add_member": "rule:not_protected_and_is_owner"
}
Examples
--------
Example 1. (The default policy configuration)
::
{
"default": ""
}
Note that an empty rule string (as in the ``"default"`` rule above) means
that all methods of the Glance API are callable by anyone.
Example 2. Disallow modification calls to non-admins
::
{
"default": "",
"add_image": "role:admin",
"modify_image": "role:admin",
"delete_image": "role:admin"
}
|
PypiClean
|
/toolcli-0.6.5.tar.gz/toolcli-0.6.5/docs/command_execution_sequence.md
|
## Command Execution Sequence
- starting with a raw command, command execution is similar to the following:
```python
command_sequence = parse_command_sequence(raw_command)
command_spec = resolve_command_spec(command_index[command_sequence])
command_args = parse_command_args(raw_command, command_spec['args'])
_execute_middleware(config['pre_middlewares'], command_args)
f = resolve_command_function(command_spec['f'])
f(**command_args)
_execute_middleware(config['post_middlewares'], command_args)
```
|
PypiClean
|
/dimahelpers-0.1.0.tar.gz/dimahelpers-0.1.0/doc/docs/usage-cli.md
|
# Command-Line Usage
Once installation is complete, an executable will be added to PATH which can be accessed as below:
**Linux**
`dima-cli -h`
**Windows**
`dima-cli.exe -h`
**MacOS**
`dima-cli -h`
## Basic Usage
`dima-cli -i sequences.fasta -o output.json -l 9`
`dima-cli -i sequences.fasta | grep supports`
### Example Output
```
[
{
"position":1,
"entropy":1.0002713744986218,
"supports":2,
"variants":[
{
"position":1,
"sequence":"SKGKRTVDL",
"count":1,
"incidence":50.0,
"motif_short":"I",
"motif_long":"Index"
},
{
"position":1,
"sequence":"FHWLMLNPN",
"count":1,
"incidence":50.0,
"motif_short":"Ma",
"motif_long":"Major"
}
],
"kmer_types":{
"incidence":50.0,
"types":[
"FHWLMLNPN"
]
}
}
]
```
## Advanced Usage
The flag --he/--header along with the -f/--format header can be used to generate data for each variant using the metadata from the fasta sequence header.
`dima-cli -i sequences.fasta -o output.json -he -f "(type)|(id)|(strain)"`
Each component (e.g. id, strain, country, etc.) of the header needs to be wrapped in brackets. Any separator (e.g. |, /, _, etc.) can be used.
!!! Consideration caution
How to deal with missing information in the FASTA header? Glad you asked!
Consider the below FASTA file:
```
>A|CY021716|A/AA/Huston/1945|
MERIKELRNLMSQSRTREILTKTTVDHMAIIKKYTSGRQEKNPSLRMKWMMAMKYPITADKRITEMIPER
>A|CY020292|A/AA/Marton/1943|USA
NEQGQTLWSKMNDAGSDRVMVSPLAVTWWNRNGPMTSTVHYPKIYKTYFEKVERLKHGTFGPVHFRNQVK
>A|CY083917|A/Aalborg/INS132/2009|Denmark
MERIKELRDLMSQSRTREILTKTTVDHMAIIKKYTSGRQEKNPALRMKWMMAMRYPITADKRIMDMIPER
```
The first sequence does not contain the Country of Collection. By default, DiMA would raise a
`HeaderItemEmpty` exception.
To override this behavior, the `no_header_error` parameter can be used to replace missing
header information with "Unknown".
**PS:** These kinds of sequences with missing information are common when exported from NCBI Virus.
### Example Output
```
[
{
"position":1,
"entropy":1.0001724373828909,
"supports":2,
"variants":[
{
"position":1,
"sequence":"SKGKRTVDL",
"count":1,
"incidence":50.0,
"motif_short":"I",
"motif_long":"Index",
"type":[
"tr"
],
"accession":[
"A0A2Z4MTJ4"
],
"strain":[
"A0A2Z4MTJ4_9HIV2_Envelope_glycoprotein_gp160_OS_Human_immunodeficiency_virus_2_OX_11709_GN_env_PE_4_SV_1"
]
},
{
"position":1,
"sequence":"FHWLMLNPN",
"count":1,
"incidence":50.0,
"motif_short":"Ma",
"motif_long":"Major",
"type":[
"tr"
],
"accession":[
"A0A0K2GVL2"
],
"strain":[
"A0A2Z4MTJ4_9HIV2_Envelope_glycoprotein_gp160_OS_Human_immunodeficiency_virus_2_OX_11709_GN_env_PE_4_SV_1"
]
}
],
"kmer_types":{
"incidence":50.0,
"types":[
"FHWLMLNPN"
]
}
}
]
```
### Command-Line Arguments
| Argument | Type | Default | Example | Description |
|------------------ |--------- |--------- |----------------------------------------------------------------------------------------------------------- |----------------------------------------------------------------------------------- |
| -h | N/A | N/A | `dima-cli -h` | Prints a summary of all available command-line arguments. |
| -i | String | N/A | `dima-cli -i "/path/to/alignment.fasta"` | Absolute path to the aligned sequences file in FASTA format. |
| -o | String | N/A | `dima-cli -i "/path/to/alignment.fasta" -o output.json` | Absolute path to the output JSON file. |
| -l | Integer | 9 | `dima-cli -i "/path/to/alignment.fasta" -l 12` | The length of the generated k-mers. |
| -s | Integer | 10000 | `dima-cli -i "/path/to/alignment.fasta" -s 20000` | Maximum number of samples use when calculating entropy. |
| -it | Integer | 10 | `dima-cli -i "/path/to/alignment.fasta" -it 100` | Maximum number of iterations used when calculating entropy. |
| -he | Boolean | False | `dima-cli -i "/path/to/alignment.fasta" -he -f "(type)\|(accession)\|(strain)\|(country)"` | Enables decoding of the FASTA headers to derive details for each generated k-mer. |
| -f | String | N/A | `dima-cli -i "/path/to/alignment.fasta" -he -f "(type)\|(accession)\|(strain)\|(country)"` | The format of the FASTA header in the FASTA Multiple Sequence Alignment. |
| -no_header_error | Boolean | False | `dima-cli -i "/path/to/alignment.fasta" -he -f "(type)\|(accession)\|(strain)\|(country)" -no_header_error` | Whether to raise an error if empty items are found in any of the FASTA headers. |
### More Examples
`dima-cli -i sequences.fasta -o output.json -he -f "(ncbid)/(strain)/(host)/(country)"`
`dima-cli -i sequences.fasta -o output.json -he -f "(ncbid)/(strain)/(host)|(country)"`
`dima-cli -i sequences.fasta -o output.json -he -f "(ab)/(cde)/(fghi)/(jklm)"`
`dima-cli -i sequences.fasta -o output.json -he -f "(ab)/(cde)/(fghi)/(jklm) -no_header_error"`
|
PypiClean
|
/monk_keras_cuda101_test-0.0.1-py3-none-any.whl/monk/pytorch/losses/retrieve_loss.py
|
from monk.pytorch.losses.imports import *
from monk.system.imports import *
@accepts(dict, post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=False)
def retrieve_loss(system_dict):
    """
    Rebuild the torch loss criterion after experiment state changes.

    Args:
        system_dict (dict): System dictionary storing experiment state and set variables

    Returns:
        dict: updated system dict with ``system_dict["local"]["criterion"]``
        replaced by the instantiated torch loss module (left as the loss
        name string if the name is not recognised, matching prior behavior).
    """
    loss_name = system_dict["hyper-parameters"]["loss"]["name"]
    system_dict["local"]["criterion"] = loss_name

    def _params():
        # Fetched lazily so losses without extra parameters never touch
        # the params sub-dict.
        return system_dict["hyper-parameters"]["loss"]["params"]

    # Dispatch table replacing the original if/elif chain; each factory is a
    # zero-argument callable so only the selected loss reads its parameters.
    factories = {
        "l1": lambda: torch.nn.L1Loss(reduction='mean'),
        "l2": lambda: torch.nn.MSELoss(reduction='mean'),
        "softmaxcrossentropy": lambda: torch.nn.CrossEntropyLoss(
            weight=_params()["weight"], reduction='mean'),
        "crossentropy": lambda: torch.nn.NLLLoss(
            weight=_params()["weight"], reduction='mean'),
        "sigmoidbinarycrossentropy": lambda: torch.nn.BCEWithLogitsLoss(
            weight=_params()["weight"], reduction='mean'),
        "binarycrossentropy": lambda: torch.nn.BCELoss(
            weight=_params()["weight"], reduction='mean'),
        "kldiv": lambda: torch.nn.KLDivLoss(reduction='mean'),
        "poissonnll": lambda: torch.nn.PoissonNLLLoss(
            log_input=_params()["log_pre_applied"], reduction='mean'),
        "huber": lambda: torch.nn.SmoothL1Loss(reduction='mean'),
        "hinge": lambda: torch.nn.HingeEmbeddingLoss(
            margin=_params()["margin"], reduction='mean'),
        "squaredhinge": lambda: torch.nn.SoftMarginLoss(reduction='mean'),
        "multimargin": lambda: torch.nn.MultiMarginLoss(
            p=1, margin=_params()["margin"], reduction='mean'),
        "squaredmultimargin": lambda: torch.nn.MultiMarginLoss(
            p=2, margin=_params()["margin"], reduction='mean'),
        "multilabelmargin": lambda: torch.nn.MultiLabelMarginLoss(
            reduction='mean'),
        "multilabelsoftmargin": lambda: torch.nn.MultiLabelSoftMarginLoss(
            reduction='mean'),
    }

    build = factories.get(loss_name)
    if build is not None:
        system_dict["local"]["criterion"] = build()
    return system_dict
|
PypiClean
|
/lib_path-1.0.4.tar.gz/lib_path-1.0.4/ISSUE_TEMPLATE.md
|
* **I'm submitting a ... **
- [ ] bug report
- [ ] feature request
- [ ] support request
* **Do you want to request a *feature* or report a *bug*?**
* **What is the current behavior?**
* **If the current behavior is a bug, please provide the steps to reproduce and if possible a minimal demo of the problem**
* **What is the expected behavior?**
* **What is the motivation / use case for changing the behavior?**
* **Please tell us about your environment:**
- Release Number of the Repository used :
- Python Version :
- OS, OS Version :
* **Other information** (e.g. detailed explanation, stack traces, related issues, suggestions how to fix, links for us to have context, eg. stackoverflow
, gitter, etc)
|
PypiClean
|
/django_adminx-2.0.3-py3-none-any.whl/xadmin/plugins/bookmark.py
|
from django.contrib.contenttypes.models import ContentType
from django.urls.base import reverse
from django.db import transaction
from django.db.models import Q
from django.forms import ModelChoiceField
from django.http import QueryDict
from django.template import loader
from django.utils.decorators import method_decorator
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.csrf import csrf_protect
from xadmin.filters import FILTER_PREFIX, SEARCH_VAR
from xadmin.plugins.relate import RELATE_PREFIX
from xadmin.plugins.utils import get_context_dict
from xadmin.sites import site
from xadmin.views import ModelAdminView, BaseAdminPlugin, ListAdminView
from xadmin.views.list import COL_LIST_VAR, ORDER_VAR
from xadmin.views.dashboard import widget_manager, BaseWidget, PartialBaseWidget
from xadmin.models import Bookmark
# Method-friendly form of csrf_protect, usable on class-based-view methods.
csrf_protect_m = method_decorator(csrf_protect)
class BookmarkPlugin(BaseAdminPlugin):
    """List-view plugin that renders a bookmark menu for the change list.

    Combines statically configured bookmarks (``list_bookmarks``) with
    ``Bookmark`` model instances owned by or shared with the current user,
    marking the entry that matches the current request's querystring.
    """

    # [{'title': "Female", 'query': {'gender': True}, 'order': ('-age'), 'cols': ('first_name', 'age', 'phones'), 'search': 'Tom'}]
    list_bookmarks = []
    show_bookmarks = True

    def has_change_permission(self, obj=None):
        # Superusers (or calls without a concrete bookmark) may always edit;
        # otherwise only the bookmark's owner can change it.
        if not obj or self.user.is_superuser:
            return True
        else:
            return obj.user == self.user

    def get_context(self, context):
        """Inject bookmark menu data (``bk_*`` keys) into the template context."""
        if not self.show_bookmarks:
            return context
        bookmarks = []
        # Canonical querystring of the current request, restricted to the
        # parameters that define a list view (columns, ordering, search,
        # filter/relate prefixes) so it can be compared against bookmarks.
        current_qs = '&'.join([
            '%s=%s' % (k, v)
            for k, v in sorted(filter(
                lambda i: bool(i[1] and (
                    i[0] in (COL_LIST_VAR, ORDER_VAR, SEARCH_VAR)
                    or i[0].startswith(FILTER_PREFIX)
                    or i[0].startswith(RELATE_PREFIX)
                )),
                self.request.GET.items()
            ))
        ])
        model_info = (self.opts.app_label, self.opts.model_name)
        has_selected = False
        menu_title = _(u"Bookmark")
        list_base_url = reverse('xadmin:%s_%s_changelist' %
                                model_info, current_app=self.admin_site.name)
        # local bookmarks
        for bk in self.list_bookmarks:
            title = bk['title']
            # Translate the bookmark's query dict into list-view GET params.
            params = dict([
                (FILTER_PREFIX + k, v)
                for (k, v) in bk['query'].items()
            ])
            if 'order' in bk:
                params[ORDER_VAR] = '.'.join(bk['order'])
            if 'cols' in bk:
                params[COL_LIST_VAR] = '.'.join(bk['cols'])
            if 'search' in bk:
                params[SEARCH_VAR] = bk['search']

            def check_item(i):
                # Keep truthy values, but also explicit False (e.g. boolean filters).
                return bool(i[1]) or i[1] == False

            bk_qs = '&'.join([
                '%s=%s' % (k, v)
                for k, v in sorted(filter(check_item, params.items()))
            ])
            url = list_base_url + '?' + bk_qs
            # Selected iff the canonical querystrings match exactly.
            selected = (current_qs == bk_qs)
            bookmarks.append(
                {'title': title, 'selected': selected, 'url': url})
            if selected:
                menu_title = title
                has_selected = True
        # Saved bookmarks from the database: the user's own plus shared ones.
        content_type = ContentType.objects.get_for_model(self.model)
        bk_model_info = (Bookmark._meta.app_label, Bookmark._meta.model_name)
        bookmarks_queryset = Bookmark.objects.filter(
            content_type=content_type,
            url_name='xadmin:%s_%s_changelist' % model_info
        ).filter(Q(user=self.user) | Q(is_share=True))
        for bk in bookmarks_queryset:
            selected = (current_qs == bk.query)
            # Link to the edit view only when the user may change the bookmark.
            if self.has_change_permission(bk):
                change_or_detail = 'change'
            else:
                change_or_detail = 'detail'
            bookmarks.append({'title': bk.title, 'selected': selected, 'url': bk.url, 'edit_url':
                              reverse('xadmin:%s_%s_%s' % (bk_model_info[0], bk_model_info[1], change_or_detail),
                                      args=(bk.id,))})
            if selected:
                menu_title = bk.title
                has_selected = True
        post_url = reverse('xadmin:%s_%s_bookmark' % model_info,
                           current_app=self.admin_site.name)
        new_context = {
            'bk_menu_title': menu_title,
            'bk_bookmarks': bookmarks,
            'bk_current_qs': current_qs,
            'bk_has_selected': has_selected,
            'bk_list_base_url': list_base_url,
            'bk_post_url': post_url,
            'has_add_permission_bookmark': self.admin_view.request.user.has_perm('xadmin.add_bookmark'),
            'has_change_permission_bookmark': self.admin_view.request.user.has_perm('xadmin.change_bookmark')
        }
        context.update(new_context)
        return context

    # Media
    def get_media(self, media):
        # Attach the plugin's client-side script to the page media.
        return media + self.vendor('xadmin.plugin.bookmark.js')

    # Block Views
    def block_nav_menu(self, context, nodes):
        # Prepend the bookmark dropdown to the list view's nav menu.
        if self.show_bookmarks:
            nodes.insert(0, loader.render_to_string('xadmin/blocks/model_list.nav_menu.bookmarks.html',
                                                    context=get_context_dict(context)))
class BookmarkView(ModelAdminView):
    """Endpoint that saves a new list-view bookmark for the current user."""

    @csrf_protect_m
    @transaction.atomic
    def post(self, request):
        """Create a Bookmark from POST data and return its title and url."""
        url_name = 'xadmin:%s_%s_changelist' % (self.opts.app_label, self.opts.model_name)
        new_bookmark = Bookmark(
            content_type=ContentType.objects.get_for_model(self.model),
            title=request.POST['title'],
            user=self.user,
            query=request.POST.get('query', ''),
            is_share=request.POST.get('is_share', 0),
            url_name=url_name,
        )
        new_bookmark.save()
        return self.render_response({'title': new_bookmark.title, 'url': new_bookmark.url})
class BookmarkAdmin(object):
    """xadmin options class for the ``Bookmark`` model."""

    model_icon = 'fa fa-book'
    list_display = ('title', 'user', 'url_name', 'query')
    list_display_links = ('title',)
    user_fields = ['user']
    hidden_menu = True

    def queryset(self):
        """Superusers see every bookmark; others see their own plus shared ones."""
        if self.user.is_superuser:
            return Bookmark.objects.all()
        return Bookmark.objects.filter(Q(user=self.user) | Q(is_share=True))

    def get_list_display(self):
        """Hide the 'user' column from non-superusers.

        The removal is guarded: the original unconditional ``list.remove('user')``
        raises ValueError when 'user' is already absent (e.g. if the base
        implementation hands back a shared, previously-mutated list).
        """
        list_display = super(BookmarkAdmin, self).get_list_display()
        if not self.user.is_superuser and 'user' in list_display:
            list_display.remove('user')
        return list_display

    def has_change_permission(self, obj=None):
        """Owners and superusers may edit a bookmark; others are read-only."""
        if not obj or self.user.is_superuser:
            return True
        else:
            return obj.user == self.user
@widget_manager.register
class BookmarkWidget(PartialBaseWidget):
    """Dashboard widget rendering the first page of a bookmarked list view."""

    widget_type = _('bookmark')
    widget_icon = 'fa fa-bookmark'
    description = _(
        'Bookmark Widget, can show user\'s bookmark list data in widget.')
    template = "xadmin/widgets/list.html"
    # Form field used on the widget-config form to pick which bookmark to show.
    bookmark = ModelChoiceField(
        label=_('Bookmark'), queryset=Bookmark.objects.all(), required=False)

    def setup(self):
        """Resolve the chosen bookmark into a ready-to-render ListAdminView."""
        BaseWidget.setup(self)
        bookmark = self.cleaned_data['bookmark']
        model = bookmark.content_type.model_class()
        # Re-parse the bookmark's saved querystring into GET parameters.
        data = QueryDict(bookmark.query)
        self.bookmark = bookmark
        if not self.title:
            self.title = smart_text(bookmark)
        # Build a synthetic GET request carrying the bookmark's parameters.
        req = self.make_get_request("", data.items())
        self.list_view = self.get_view_class(
            ListAdminView, model, list_per_page=10, list_editable=[])(req)

    def has_perm(self):
        # Widget visibility is not restricted; row-level checks happen in the list view.
        return True

    def context(self, context):
        """Populate the template context with up to five columns of results."""
        list_view = self.list_view
        list_view.make_result_list()
        base_fields = list_view.base_list_display
        # Cap the widget at five columns to keep it compact.
        if len(base_fields) > 5:
            base_fields = base_fields[0:5]
        context['result_headers'] = [c for c in list_view.result_headers(
        ).cells if c.field_name in base_fields]
        context['results'] = [
            [o for i, o in enumerate(filter(
                lambda c: c.field_name in base_fields,
                r.cells
            ))]
            for r in list_view.results()
        ]
        context['result_count'] = list_view.result_count
        context['page_url'] = self.bookmark.url
# Wire the bookmark feature into xadmin: the model admin, the list-view
# plugin, and the POST endpoint used to create bookmarks from change lists.
site.register(Bookmark, BookmarkAdmin)
site.register_plugin(BookmarkPlugin, ListAdminView)
site.register_modelview(r'^bookmark/$', BookmarkView, name='%s_%s_bookmark')
|
PypiClean
|
/pacman_ipam-0.9.6.tar.gz/pacman_ipam-0.9.6/pacman/utils/utils.py
|
import datetime, os, configparser
msgbuffer = []  # formatted lines pending flush to the on-disk log file
guibuffer = []  # raw messages destined for the GUI console


def logmsg(msg, toFile=False, toGUI=True):
    """
    Logs message to GUI and text file

    Parameters
    ----------
    msg : string
        Message to log.
    toFile : boolean, optional
        Whether to write to log file or only to window console. The default is False.
    toGUI : boolean, optional
        Whether to write to window console. The default is True.

    Returns
    -------
    None.
    """
    # NOTE(review): output_dir and experiment are module globals that are not
    # defined in this file — presumably set by the application at startup;
    # confirm they are initialised before any toFile logging occurs.
    global msgbuffer, output_dir, experiment
    if toGUI:
        guibuffer.append(msg)
    # BUGFIX: the module does `import datetime` (the module, not the class),
    # so the original `datetime.now()` raised AttributeError at runtime.
    T = datetime.datetime.now().strftime("%d/%m (%H:%M:%S)")
    frmstr = f"T:{T}: {msg}"
    if toFile:
        msgbuffer.append(frmstr)
    print(frmstr)
    # Flush the buffer once it grows past 10 lines, or immediately on
    # experiment-terminating messages so nothing is lost.
    if len(msgbuffer) > 10 or "Experiment Finished" in msg or "Experiment cancelled" in msg:
        # os.path.join instead of a hard-coded "\\" separator so the log
        # path also works on non-Windows systems.
        log_path = os.path.join(output_dir, f"{experiment}_log.txt")
        with open(log_path, mode='a') as file_object:
            for msgiter in msgbuffer:
                file_object.write('%s\n' % msgiter)
        msgbuffer.clear()
def read_ini_file(file_path):
    """Parse an INI settings file into a nested dict.

    Values equal to 'true'/'false' (case-insensitive) are coerced to booleans;
    everything else is kept as a string. Each section and option is echoed to
    stdout as it is read.
    """
    if not os.path.isfile(file_path):
        raise FileNotFoundError(f"The PACsettings {file_path} was not found.")
    parser = configparser.ConfigParser()
    parser.read(file_path)
    settings = {}
    for section in parser.sections():
        print(f"Reading in {section}")
        parsed = {}
        for option, raw_value in parser.items(section):
            lowered = raw_value.lower()
            if lowered == 'true':
                parsed[option] = True
            elif lowered == 'false':
                parsed[option] = False
            else:
                parsed[option] = raw_value
            print(f"{option}: {parsed[option]}")
        settings[section] = parsed
    return settings
# #Size filter in pixels, might need to change based on species and objective
# MINAREA = 10
# MAXAREA = 100
# #Particle distance filter. Things that are closer together get merged
# MINCELLDISTANCE = 8
# #Circularity filter. 1 demands perfect circularity, 0 means no circularity
# MINCIRC = 0.6
# #Intertia filter. 1
# MININERTIA = 0.4
# params = cv2.SimpleBlobDetector_Params()
# # Change thresholds
# params.minThreshold = 0
# params.maxThreshold = 255
# # Filter by Area.
# params.filterByArea = True
# params.minArea = MINAREA
# params.maxArea = MAXAREA
# # Filter by Circularity
# params.filterByCircularity = True
# params.minCircularity = MINCIRC
# params.maxCircularity = 1
# # Filter by InertiaRatio
# params.filterByInertia = True
# params.minInertiaRatio = MININERTIA
# params.maxInertiaRatio = 1
# # Distance Between Particles
# params.minDistBetweenBlobs = MINCELLDISTANCE
# params.filterByColor = False
# def runandshow(img,sparams = None):
# if sparams is None:
# global params
# sparams = params
# detector = cv2.SimpleBlobDetector_create(sparams)
# img = img[50:img.shape[0]-50,50*2:img.shape[1]-50*2]
# img = cv2.GaussianBlur(img,(3,3),0)
# #thresh_binary = cv2.adaptiveThreshold(img, 1, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, 31, 6)
# #ret1, thresh_binary = cv2.threshold(img,25,1,cv2.THRESH_BINARY)
# ret3,thresh_binary = cv2.threshold(img,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
# binimg = thresh_binary
# th = cv2.multiply(img, thresh_binary)
# reversed_binimg =(1-binimg)*255
# keypoints = detector.detect(th)
# im_with_kp = cv2.drawKeypoints(th, keypoints, np.array([]),(0,0,255), cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
# img_disp = np.hstack((cv2.cvtColor(img,cv2.COLOR_GRAY2BGR),cv2.cvtColor(thresh_binary,cv2.COLOR_GRAY2BGR),im_with_kp))
# cv2.imshow("KP",img_disp)
# print(len(keypoints))
# #return keypoints
# def make_z_stack(step=1, extent = 10):
# import skimage.io, tifffile,os
# import SeriCom
# import IPAMRH
# IPAMC = IPAMRH.AIPam()
# SeriC = SeriCom.SC()
# IWE_fp = 'C:\ImagingPamGigE\Data_RGB'
# SeriC.msg_resp("PZ 0")
# IPAMC.send_command("Select Image = ", "Ft")
# IPAMC.send_command("Ft only","")
# IPAMC.send_command("Save Tiff Image = ", "FocusImg0")
# Positions = [0]
# for fac in range(-1,2,2):
# for i in range(1,extent+1,step):
# foc = SeriC.get_focus()
# print(f"Taking image {i*fac} ")
# time.sleep(2.5)
# print(f"Position: {foc}\n")
# Positions.append(foc)
# time.sleep(8)
# IPAMC.send_command("Select Image = ", "Ft")
# IPAMC.send_command("Ft only","")
# IPAMC.send_command("Save Tiff Image = ", f"FocusImg{i*fac}")
# SeriC.go_to_z(i*fac)
# time.sleep(2.5)
# SeriC.go_to_z(i*fac)
# time.sleep(2.5)
# SeriC.go_to_z(0)
# focus_stack_col = skimage.io.imread_collection(IWE_fp+"\\FocusImg*",plugin = 'tifffile')
# focus_stack_imgs = focus_stack_col.concatenate()
# tifffile.imwrite(IWE_fp+"\\"+"Z_Stack.tif",focus_stack_imgs,imagej = True)
# with open (IWE_fp+"\\focus_pos.txt", mode ="a") as file:
# for focpos in Positions:
# file.write('%s\n' % focpos)
# for fp in focus_stack_col.files:
# if (os.path.isfile(fp)):
# os.remove(fp)
# else:
# print(f"File at {fp} not found")
# def rAFD(IMGs, AF = None,xticks=None):
# import AutoFocus as AFM
# if AF is None:
# AF = AFM.AFMan(len(IMGs-1),1)
# AF.compute_Initial_Setup([IMGs[0]],True)
# for idx,img in enumerate(IMGs[1:]):
# AF.perform_Autofocus(img, idx+1, 0, True)
# #AF.calc_score()
# fig,(ax2,ax3,ax4,ax5) = plt.subplots(4)
# fig.tight_layout(pad=3.0)
# #ax1.set_title('Avg_Scaled_Direction')
# #ax1.plot(AF.FSComponents[:,0,0])
# ax2.set_title('PLLS')
# ax2.plot(AF.FSComponents[:,0,1]/AF.FSComponents[0,0,1])
# ax3.set_title('Theta')
# ax3.plot(AF.FSComponents[:,0,0]/AF.FSComponents[0,0,0])
# ax4.set_title('Focus Score and Z corr')
# ax4.plot(AF.FS[:],color = "red")
# ax5.plot(AF.corrections[:], color = "green")
# if(xticks == None):
# plt.xticks(np.arange(1,len(IMGs),int(len(IMGs)/10)))
# else:
# plt.xticks(xticks)
# def AF_Scoring(IMGs, AF = None,xticks=None,fp=None,plot=False):
# import AFM
# import ipdb
# if AF is None:
# AF = AFM.AFMan(len(IMGs),1,LPL=True,PLLS=True)
# AF.compute_initial_setup([IMGs[0]],True)
# print("Init done")
# for idx,img in enumerate(IMGs[1:]):
# AF.do_autofocus(img, idx+1, 0, True)
# #AF.calc_score()
# if(xticks is None):
# xticks = range(0,len(IMGs)-1)
# if(plot):
# fig,(ax1,ax2) = plt.subplots(2)
# fig.suptitle("Autofocus Scoring")
# fig.tight_layout(pad=3.0)
# ax1.set_ylabel('Focus score')
# ax1.scatter(xticks, AF.FSComponents[:,0,1]/AF.FSComponents[0,0,1])
# ax2.set_ylabel('Magnitude of Z correction')
# ax2.scatter(xticks, AF.corrections[:])
# plt.xlabel("Z distance from focal plane")
# fig.savefig(fname = fp+"/Focus_Score")
# plt.close()
# #return xticks,AF.FSComponents[:,0,1]/AF.FSComponents[0,0,1], AF.corrections[:]
# return xticks,AF.FS, [x[0] for x in AF.corrections]
# def read_xticks(fp):
# num_list = []
# with open(fp,'r') as fh:
# for line in fh:
# num_list.append((float(line)))
# return num_list
# def read_cor_xticks_stack(fp):
# import tifffile
# import numpy as np
# positions = read_xticks(fp+"/focus_pos.txt")
# stack = tifffile.imread(fp+"/Z_stack.tif")
# cor_pos =[0]
# cor_pos.extend(positions[10:0:-1])
# cor_pos.extend(positions[11:])
# cor_stack = np.append(stack[0,None],stack[10:0:-1][:,:],axis=0)
# cor_stack = np.append(cor_stack,stack[11:][:,:],axis=0)
# return cor_pos,cor_stack
# def perform_scoring(fp):
# pos,imgs = read_cor_xticks_stack(fp)
# xticks, fs, z_cor = AF_Scoring(IMGs = imgs,xticks=pos,fp=fp)
# if(sum(fs) == 0):
# return z_cor,xticks
# with open(fp+".txt", mode = "w") as fh:
# for i in range(len(xticks)):
# fh.write(str(xticks[i]) + "," + str(fs[i]) + "," + str(z_cor[i])+"\n")
# #plt.plot(xticks,z_cor)
# return z_cor,xticks,fs
# #The below function attempts to correct in a single position. It prints the
# #iteration and the correction applied. Micromanager 2.0 and imagingwin needs to be running, and the ipamremotescript
# #Proper useage/commands are as follows (iphython console). Make sure you are running in proper working directory:
# def single_pos_Autofocus(IPAM, InitImg, factor, AF = None):
# SC = StageCom.StageCom()
# import cv2
# import IPAMRH
# IWE_fp = "C:/"
# IPAMR = IPAMRH.AIPam()
# IPAMR.send_command("Ft only","")
# IPAMR.send_command("Select Image", "Ft")
# IPAMR.send_command("Save Tiff Image = ", "FocusImg")
# #imgsrc = cv2.imread("FocusImg.tif", cv2.IMREAD_GRAYSCALE)
# imgsrc=cv2.imread(IWE_fp+"\FocusImg.tif",cv2.IMREAD_GRAYSCALE)
# #single_pos_Autofocus(IPAMR,imgsrc,1)
# if AF is None:
# AF = AFM.AFMan(1,10)
# AF.compute_initial_setup([InitImg],True)
# for i in range(100):
# time.sleep(2.0)
# dz = AF.do_autofocus(IPAM,1,0,True)
# print(str(i) + "\t" + str(dz))
# correction = factor*dz
# SC.move_focus(correction)
# def SSIM(imgstack):
# from skimage.metrics import structural_similarity
# import tifffile
# scores = []
# if(type(imgstack) == str):
# #If we don't get an tiff-stack, assume its coming directly as a ImagingWin output stack
# imgstack = tifffile.imread(imgstack)
# imgstack = imgstack[4:]
# imgstack = make_Yield_Images(imgstack)
# for img in imgstack:
# score = structural_similarity(imgstack[0],img)
# scores.append(score)
# for score in scores:
# print(f"SSIM: {score}")
# return scores
# def make_Yield_Images(img_stack):
# """
# Parameters
# ----------
# img_stack : Numpy, shape = 2x, 480, 640
# Input image stack of only Fo/Fm.
# Returns
# -------
# Yield image stack: x,480,640. Range of values are 0.0 to 1.0
# """
# #Assumes the initial 4 images have been removed
# Fo = img_stack[::2]
# Fm = img_stack[1::2]
# Fm = np.asarray(Fm,dtype=np.int8)
# #Yield is defined as Fv/Fm or (Fm-Fo)/Fm
# Yield = []
# for i in range(len(Fo)):
# Mask = np.where(Fm[i] > int(0.05*256),1,0)
# #Mask = Mask.astype(np.uint8)
# #Mask = cv2.medianBlur(Mask,3)
# #Mask = np.where(Mask>0,1,0)
# Fv = np.subtract(Fm[i],Fo[i],dtype = np.int8)
# #Floor to zero
# Fv = np.multiply(np.clip(Fv,0,255),Mask)
# #ipdb.set_trace()
# #cYield = np.divide(Fv,Fm[i],out=np.zeros_like(Fv),where=Fm[i]!=0)
# cYield = np.divide(Fv.astype(np.float16),Fm[i].astype(np.float16),out=np.zeros_like(Fv, dtype=np.float16),where=Fm[i]!=0)
# Yield.append(cYield)
# return np.asarray(Yield)
# import matplotlib.pyplot as plt
# dirs = ["C:/Users/ollpo511/Documents/U-Print/Behrendt/Z_Stacks/03_10_Chalmy_10x",
# "C:/Users/ollpo511/Documents/U-Print/Behrendt/Z_Stacks/16_09_4019",
# "C:/Users/ollpo511/Documents/U-Print/Behrendt/Z_Stacks/28_09_Chlamy_10x",
# "C:/Users/ollpo511/Documents/U-Print/Behrendt/Z_Stacks/29_09_421_10x",
# "C:/Users/ollpo511/Documents/U-Print/Behrendt/Z_Stacks/30_09_421_10x/1"]
# #"C:/Users/ollpo511/Documents/U-Print/Behrendt/Z_Stacks/30_09_421_10x/2"]
# plt.close()
# fs = []
# xticks = []
# zcors = []
# for sd in dirs:
# z,x,f = perform_scoring(sd)
# xticks.append(x)
# zcors.append(z)
# fs.append(f)
# for idx in range(0,len(dirs)):
# plt.scatter(xticks[idx],zcors[idx],label=dirs[idx])
# plt.legend()
# avgz = np.mean(zcors,0)
# avgzer = np.std(zcors,0)
# avgx = np.mean(xticks,0)
# plt.errorbar(avgx,avgz,yerr=avgzer)
|
PypiClean
|
/mxnet_cu115-1.9.1-py3-none-manylinux2014_x86_64.whl/mxnet/contrib/quantization.py
|
try:
from scipy import stats
except ImportError:
stats = None
import ctypes
import logging
import os
import shutil
import warnings
import numpy as np
from ..base import _LIB, check_call, py_str
from ..base import c_array, c_str, mx_uint, c_str_array
from ..base import NDArrayHandle, SymbolHandle
from ..symbol import Symbol
from ..symbol import load as sym_load
from .. import ndarray
from ..ndarray import load as nd_load
from ..ndarray import save as nd_save
from ..ndarray import NDArray
from ..io import DataIter, DataDesc, DataBatch
from ..context import cpu, Context
from ..module import Module
def _quantize_params(qsym, params, th_dict):
    """Given a quantized symbol and a dict of params that have not been quantized,
    generate quantized params. Currently only supports quantizing the arg_params
    with names of `weight` or `bias`, not aux_params. If `qsym` contains symbols
    that are excluded from being quantized, their corresponding params will
    not be quantized, but saved together with quantized params of the symbols that
    have been quantized.

    Parameters
    ----------
    qsym : Symbol
        Quantized symbol from FP32 symbol.
    params : dict of str->NDArray
        FP32 params that have not been quantized.
    th_dict : dict of str->(float, float)
        Min/max pairs of layers' output; used to fill the `_min`/`_max`
        threshold inputs of requantize nodes.
    """
    inputs_name = qsym.list_arguments()
    quantized_params = {}
    for name in inputs_name:
        if name.endswith(('weight_quantize', 'bias_quantize')):
            # Quantize weight/bias offline into int8 together with their
            # min/max ranges so runtime quantization of params is avoided.
            original_name = name[:-len('_quantize')]
            param = params[original_name]
            # pylint: disable=unbalanced-tuple-unpacking
            val, vmin, vmax = ndarray.contrib.quantize(data=param,
                                                       min_range=ndarray.min(param),
                                                       max_range=ndarray.max(param),
                                                       out_type='int8')
            quantized_params[name] = val
            quantized_params[name + '_min'] = vmin
            quantized_params[name + '_max'] = vmax
        elif name in params:
            # Param excluded from quantization: keep the FP32 value as-is.
            quantized_params[name] = params[name]
        elif name.endswith('_min'):
            output = name[:-len('_min')]
            if output in th_dict:
                quantized_params[name] = ndarray.array([th_dict[output][0]])
        elif name.endswith('_max'):
            # BUGFIX: this branch previously stripped len('_min'); it only
            # worked because '_min' and '_max' happen to be the same length.
            output = name[:-len('_max')]
            if output in th_dict:
                quantized_params[name] = ndarray.array([th_dict[output][1]])
    return quantized_params
def _quantize_symbol(sym, ctx, excluded_symbols=None, excluded_operators=None,
                     offline_params=None, quantized_dtype='int8', quantize_mode='smart',
                     quantize_granularity='tensor-wise'):
    """Given a symbol object representing a neural network of data type FP32,
    quantize it into a INT8 network.

    Parameters
    ----------
    sym : Symbol
        FP32 neural network symbol.
    ctx : Context
        Defines the device that users want to run quantized symbol.
    excluded_symbols : list of strings
        A list of strings representing the names of the symbols that users want to excluding
        from being quantized.
    excluded_operators : list of strings
        A list of strings representing the names of the operators that users want to excluding
        from being quantized.
    offline_params : list of strs
        Names of the parameters that users want to quantize offline. It's always recommended to
        quantize parameters offline so that quantizing parameters during the inference can be
        avoided.
    quantized_dtype: str
        The quantized destination type for input data.
    quantize_mode: str
        The mode that quantization pass to apply.
    quantize_granularity: str
        The granularity of quantization, currently supports 'tensor-wise' and 'channel-wise'
        quantization. The default value is 'tensor-wise'.

    Returns
    -------
    tuple
        (quantized Symbol, list of output names that require calibration).
    """
    # Normalize the optional list arguments before handing them to the C API.
    num_excluded_symbols = 0
    if excluded_symbols is not None:
        assert isinstance(excluded_symbols, list)
        num_excluded_symbols = len(excluded_symbols)
    else:
        excluded_symbols = []
    num_excluded_ops = 0
    if excluded_operators is not None:
        assert isinstance(excluded_operators, list)
        num_excluded_ops = len(excluded_operators)
    else:
        excluded_operators = []
    num_offline = 0
    offline = []
    if offline_params is not None:
        num_offline = len(offline_params)
        for k in offline_params:
            offline.append(c_str(k))
    out = SymbolHandle()
    size = mx_uint()
    calib_str = ctypes.POINTER(ctypes.c_char_p)()
    check_call(_LIB.MXQuantizeSymbol(sym.handle,
                                     ctypes.byref(out),
                                     ctypes.byref(ctypes.c_int(ctx.device_typeid)),
                                     mx_uint(num_excluded_symbols),
                                     c_str_array(excluded_symbols),
                                     mx_uint(num_excluded_ops),
                                     c_str_array(excluded_operators),
                                     mx_uint(num_offline),
                                     c_array(ctypes.c_char_p, offline),
                                     c_str(quantized_dtype),
                                     ctypes.c_bool(True),
                                     c_str(quantize_mode),
                                     c_str(quantize_granularity),
                                     ctypes.byref(size),
                                     ctypes.byref(calib_str)))
    # Decode the C string array of layer names that require calibration.
    # (Removed a dead `calib_layer = []` assignment that was immediately
    # overwritten by the comprehension below.)
    calib_layer = [py_str(calib_str[i]) for i in range(size.value)]
    return Symbol(out), calib_layer
def combine_histogram(old_hist, arr, new_min, new_max, new_th):
""" Collect layer histogram for arr and combine it with old histogram.
"""
(old_hist, old_hist_edges, old_min, old_max, old_th) = old_hist
if new_th <= old_th:
hist, _ = np.histogram(arr, bins=len(old_hist), range=(-old_th, old_th))
return (old_hist + hist, old_hist_edges, min(old_min, new_min), max(old_max, new_max), old_th)
else:
# Need to generate new histogram with new_th
old_num_bins = len(old_hist)
old_step = 2 * old_th / old_num_bins
half_increased_bins = int((new_th - old_th) // old_step + 1)
new_num_bins = half_increased_bins * 2 + old_num_bins
new_th = half_increased_bins * old_step + old_th
hist, hist_edges = np.histogram(arr, bins=new_num_bins, range=(-new_th, new_th))
hist[half_increased_bins:new_num_bins - half_increased_bins] += old_hist
return (hist, hist_edges, min(old_min, new_min), max(old_max, new_max), new_th)
class _LayerHistogramCollector(object):
    """Accumulates per-layer output histograms, keyed by layer name.

    The stored histograms are later consumed by the KL-divergence based
    threshold search to compute optimal quantization ranges.
    """
    def __init__(self, num_bins=8001, include_layer=None, logger=None):
        self.hist_dict = {}
        self.num_bins = num_bins
        self.include_layer = include_layer
        self.logger = logger
    def collect(self, name, arr):
        """Callback function for collecting layer output NDArrays."""
        layer = py_str(name)
        if layer not in self.include_layer:
            return
        nd_handle = ctypes.cast(arr, NDArrayHandle)
        data = NDArray(nd_handle, writable=False).copyto(cpu()).asnumpy()
        if self.logger:
            self.logger.debug("Collecting layer %s histogram of shape %s" % (layer, data.shape))
        lo = np.min(data)
        hi = np.max(data)
        # Histograms are kept symmetric around zero with threshold th.
        th = max(abs(lo), abs(hi))
        existing = self.hist_dict.get(layer)
        if existing is not None:
            self.hist_dict[layer] = combine_histogram(existing, data, lo, hi, th)
        else:
            counts, edges = np.histogram(data, bins=self.num_bins, range=(-th, th))
            self.hist_dict[layer] = (counts, edges, lo, hi, th)
class _LayerOutputMinMaxCollector(object):
    """Tracks the running min/max of each layer's output, keyed by layer name.

    The collected extrema are used directly as quantization thresholds
    ('naive' calibration).
    """
    def __init__(self, quantized_dtype, include_layer=None, logger=None):
        self.min_max_dict = {}
        self.quantized_dtype = quantized_dtype
        self.include_layer = include_layer
        self.logger = logger
    def collect(self, name, arr):
        """Callback function for collecting min and max values from an NDArray."""
        layer = py_str(name)
        if layer not in self.include_layer:
            return
        nd = NDArray(ctypes.cast(arr, NDArrayHandle), writable=False)
        obs_min = ndarray.min(nd).asscalar()
        obs_max = ndarray.max(nd).asscalar()
        prev = self.min_max_dict.get(layer)
        if prev is None:
            self.min_max_dict[layer] = (obs_min, obs_max)
        else:
            # Widen the stored range to cover the new observation.
            self.min_max_dict[layer] = (min(prev[0], obs_min),
                                        max(prev[1], obs_max))
        if self.logger:
            self.logger.debug("Collecting layer %s min_range=%f, max_range=%f"
                              % (layer, obs_min, obs_max))
def _calibrate_quantized_sym(qsym, th_dict):
    """Write the collected thresholds into the requantize operators of `qsym`.

    Returns `qsym` unchanged when `th_dict` is empty or None.
    """
    if not th_dict:
        return qsym
    names = list(th_dict.keys())
    mins = [th_dict[k][0] for k in names]
    maxs = [th_dict[k][1] for k in names]
    calibrated_sym = SymbolHandle()
    check_call(_LIB.MXSetCalibTableToQuantizedSymbol(qsym.handle,
                                                     mx_uint(len(names)),
                                                     c_str_array(names),
                                                     c_array(ctypes.c_float, mins),
                                                     c_array(ctypes.c_float, maxs),
                                                     ctypes.byref(calibrated_sym)))
    return Symbol(calibrated_sym)
def _collect_layer_statistics(mod, data, collector, max_num_examples=None, logger=None):
    """Run forward passes over `data`, routing every monitored layer output
    through `collector.collect`, and return the number of examples consumed.

    Iteration stops early once `max_num_examples` (if given) is reached.
    """
    if not isinstance(data, DataIter):
        raise ValueError('Only supports data as a type of DataIter, while received type %s'
                         % str(type(data)))
    # monitor_all=True so inputs of layers (not only outputs) are observed too.
    mod._exec_group.execs[0].set_monitor_callback(collector.collect, monitor_all=True)
    batch_count = 0
    example_count = 0
    for batch in data:
        mod.forward(data_batch=batch, is_train=False)
        batch_count += 1
        example_count += data.batch_size
        if max_num_examples is not None and example_count >= max_num_examples:
            break
    if logger is not None:
        logger.info("Collected statistics from %d batches with batch_size=%d"
                    % (batch_count, data.batch_size))
    return example_count
def _collect_layer_output_min_max(mod, data, quantized_dtype, include_layer=None,
                                  max_num_examples=None, logger=None):
    """Run calibration forward passes and return ({layer: (min, max)}, num_examples)."""
    mm_collector = _LayerOutputMinMaxCollector(quantized_dtype=quantized_dtype,
                                               include_layer=include_layer, logger=logger)
    seen = _collect_layer_statistics(mod, data, mm_collector, max_num_examples, logger)
    return mm_collector.min_max_dict, seen
def _collect_layer_histogram(mod, data, include_layer=None,
                             max_num_examples=None, logger=None):
    """Run calibration forward passes and return ({layer: histogram}, num_examples)."""
    hist_collector = _LayerHistogramCollector(include_layer=include_layer, logger=logger)
    seen = _collect_layer_statistics(mod, data, hist_collector, max_num_examples, logger)
    return hist_collector.hist_dict, seen
def _smooth_distribution(p, eps=0.0001):
"""Given a discrete distribution (may have not been normalized to 1),
smooth it by replacing zeros with eps multiplied by a scaling factor and taking the
corresponding amount off the non-zero values.
Ref: http://web.engr.illinois.edu/~hanj/cs412/bk3/KL-divergence.pdf
"""
is_zeros = (p == 0).astype(np.float32)
is_nonzeros = (p != 0).astype(np.float32)
n_zeros = is_zeros.sum()
n_nonzeros = p.size - n_zeros
if not n_nonzeros:
raise ValueError('The discrete probability distribution is malformed. All entries are 0.')
eps1 = eps * float(n_zeros) / float(n_nonzeros)
assert eps1 < 1.0, 'n_zeros=%d, n_nonzeros=%d, eps1=%f' % (n_zeros, n_nonzeros, eps1)
hist = p.astype(np.float32)
hist += eps * is_zeros + (-eps1) * is_nonzeros
assert (hist <= 0).sum() == 0
return hist
# pylint: disable=line-too-long
def _get_optimal_threshold(hist_data, quantized_dtype, num_quantized_bins=255):
    """Find the optimal symmetric threshold for quantizing one layer's histogram.

    The reference distribution is `q`, and the candidate distribution is `p`.
    `q` is a truncated version of the original distribution. Returns
    (min_val, max_val, threshold, divergence).
    Ref: http://on-demand.gputechconf.com/gtc/2017/presentation/s7310-8-bit-inference-with-tensorrt.pdf
    """
    counts, edges, min_val, max_val, _ = hist_data
    assert len(counts) % 2 == 1
    if min_val >= 0 and quantized_dtype in ['auto', 'uint8']:
        # All-positive output destined for uint8: fold the (empty) negative
        # half into the positive side to use the full quantized range.
        num_quantized_bins = num_quantized_bins * 2 + 1
    hist_nd = ndarray.array(counts, ctx=cpu())
    edges_nd = ndarray.array(edges, ctx=cpu())
    threshold, divergence = ndarray.contrib.calibrate_entropy(hist=hist_nd,
                                                              hist_edges=edges_nd,
                                                              num_quantized_bins=num_quantized_bins)
    return min_val, max_val, threshold.asnumpy(), divergence.asnumpy()
# pylint: enable=line-too-long
def _get_optimal_thresholds(hist_dict, quantized_dtype, num_quantized_bins=255, logger=None):
    """Compute the optimal quantization threshold for every histogram in `hist_dict`.

    Entries are deleted from `hist_dict` as they are processed to release
    memory. Returns {layer_name: (low, high)}.
    """
    if stats is None:
        raise ImportError('scipy.stats is required for running entropy mode of calculating'
                          ' the optimal thresholds for quantizing FP32 ndarrays into int8.'
                          ' Please check if the scipy python bindings are installed.')
    assert isinstance(hist_dict, dict)
    if logger is not None:
        logger.info('Calculating optimal thresholds for quantization using KL divergence'
                    ' with num_quantized_bins=%d' % num_quantized_bins)
    th_dict = {}
    # Materialize the keys up front: entries are deleted while iterating.
    for name in list(hist_dict.keys()):
        assert name in hist_dict
        min_val, max_val, th, divergence = _get_optimal_threshold(
            hist_dict[name], quantized_dtype, num_quantized_bins=num_quantized_bins)
        if min_val >= 0 and quantized_dtype in ['auto', 'uint8']:
            th_dict[name] = (0, th)
        else:
            th_dict[name] = (-th, th)
        del hist_dict[name]  # release the memory
        if logger:
            logger.debug('layer=%s, min_val=%f, max_val=%f, th=%f, divergence=%f'
                         % (name, min_val, max_val, th, divergence))
    return th_dict
def _load_sym(sym, logger=None):
    """Return a Symbol, loading it from a .json file path when `sym` is a str."""
    if isinstance(sym, Symbol):
        return sym
    if isinstance(sym, str):
        # Relative paths are resolved against this module's directory.
        symbol_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), sym)
        if logger:
            logger.info('Loading symbol from file %s' % symbol_file_path)
        return sym_load(symbol_file_path)
    raise ValueError('_load_sym only accepts Symbol or path to the symbol file,'
                     ' while received type %s' % str(type(sym)))
def _load_params(params, logger=None):
"""Given a str as a path to the .params file or a pair of params,
returns two dictionaries representing arg_params and aux_params.
"""
if isinstance(params, str):
cur_path = os.path.dirname(os.path.realpath(__file__))
param_file_path = os.path.join(cur_path, params)
if logger:
logger.info('Loading params from file %s' % param_file_path)
save_dict = nd_load(param_file_path)
arg_params = {}
aux_params = {}
for k, v in save_dict.items():
tp, name = k.split(':', 1)
if tp == 'arg':
arg_params[name] = v
if tp == 'aux':
aux_params[name] = v
return arg_params, aux_params
elif isinstance(params, (tuple, list)) and len(params) == 2:
return params[0], params[1]
else:
raise ValueError('Unsupported params provided. Must be either a path to the param file or'
' a pair of dictionaries representing arg_params and aux_params')
# pylint: disable=super-init-not-called
class _DataIterWrapper(DataIter):
    """DataIter wrapper for general iterator, e.g., gluon dataloader.

    Pulls one batch from the wrapped iterable at construction time to infer
    ``provide_data`` (the DataDesc list) and ``batch_size``; afterwards it
    re-iterates the underlying iterable from the start on every reset().
    """
    def __init__(self, calib_data):
        # calib_data: any iterable yielding arrays or lists/tuples of arrays.
        self._data = calib_data
        try:
            calib_iter = iter(calib_data)
        except TypeError as e:
            raise TypeError('calib_data is not a valid iterator. {}'.format(str(e)))
        # Peek at the first batch to derive shapes; this consumes one element
        # of calib_iter, but reset() below re-creates the iterator anyway.
        data_example = next(calib_iter)
        if isinstance(data_example, (list, tuple)):
            data_example = list(data_example)
        else:
            data_example = [data_example]
        # suppose there must be one label in data_example
        # TODO(xinyu-intel): little tricky here, need to refactor.
        num_data = len(data_example)
        assert num_data > 0
        # here reshape is to handle the 5D/6D input data
        if len(data_example[0].shape) > 4:
            data_example[0] = data_example[0].reshape((-1,) + data_example[0].shape[2:])
        # First array is named 'data'; the rest get 'data0', 'data1', ...
        # NOTE(review): data_example[1] maps to 'data0' here, which looks
        # off-by-one — but the num_data >= 3 branch below rebuilds the whole
        # list with consistent 'data{i}' names, discarding these entries.
        self.provide_data = [DataDesc(name='data', shape=(data_example[0].shape))]
        self.provide_data += [DataDesc(name='data{}'.format(i), shape=x.shape) for i, x in enumerate(data_example[1:])]
        # data0, data1, ..., label
        if num_data >= 3:
            self.provide_data = [DataDesc(name='data{}'.format(i), shape=x.shape)
                                 for i, x in enumerate(data_example[0:])]
        self.batch_size = data_example[0].shape[0]
        self.reset()
    def reset(self):
        # Restart iteration over the wrapped iterable from the beginning.
        self._iter = iter(self._data)
    def next(self):
        # Propagates StopIteration from the underlying iterator when exhausted.
        next_data = next(self._iter)
        # here reshape is to handle the 5D/6D input data
        if len(next_data[0].shape) > 4:
            next_data[0] = next_data[0].reshape((-1,) + next_data[0].shape[2:])
        return DataBatch(data=next_data)
# pylint: enable=super-init-not-called
def _as_data_iter(calib_data):
    """Coerce `calib_data` into an mx.io.DataIter and return (iter, provide_data)."""
    if not isinstance(calib_data, DataIter):
        # Wrap generic iterables (e.g. a gluon DataLoader) in a DataIter shim.
        calib_data = _DataIterWrapper(calib_data)
    return calib_data, calib_data.provide_data
def quantize_model(sym, arg_params, aux_params,
                   data_names=('data',), label_names=('softmax_label',),
                   ctx=cpu(), excluded_sym_names=None, excluded_op_names=None, calib_mode='entropy',
                   calib_data=None, num_calib_examples=None,
                   quantized_dtype='int8', quantize_mode='smart',
                   quantize_granularity='tensor-wise', logger=None):
    """User-level API for generating a quantized model from a FP32 model w/ or w/o calibration.
    The backend quantized operators are only enabled for Linux systems. Please do not run
    inference using the quantized models on Windows for now.
    The quantization implementation adopts the TensorFlow's approach:
    https://www.tensorflow.org/performance/quantization.
    The calibration implementation borrows the idea of Nvidia's 8-bit Inference with TensorRT:
    http://on-demand.gputechconf.com/gtc/2017/presentation/s7310-8-bit-inference-with-tensorrt.pdf
    and adapts the method to MXNet.

    Parameters
    ----------
    sym : str or Symbol
        Defines the structure of a neural network for FP32 data types.
    arg_params : dict
        Dictionary of name to `NDArray`.
    aux_params : dict
        Dictionary of name to `NDArray`.
    data_names : a list of strs
        Data names required for creating a Module object to run forward propagation on the
        calibration dataset.
    label_names : a list of strs
        Label names required for creating a Module object to run forward propagation on the
        calibration dataset.
    ctx : Context
        Defines the device that users want to run forward propagation on the calibration
        dataset for collecting layer output statistics. Currently, only supports single context.
    excluded_sym_names : list of strings
        A list of strings representing the names of the symbols that users want to excluding
        from being quantized.
    excluded_op_names : list of strings
        A list of strings representing the names of the operators that users want to excluding
        from being quantized.
    calib_mode : str
        If calib_mode='none', no calibration will be used and the thresholds for
        requantization after the corresponding layers will be calculated at runtime by
        calling min and max operators. The quantized models generated in this
        mode are normally 10-20% slower than those with calibrations during inference.
        If calib_mode='naive', the min and max values of the layer outputs from a calibration
        dataset will be directly taken as the thresholds for quantization.
        If calib_mode='entropy' (default mode), the thresholds for quantization will be
        derived such that the KL divergence between the distributions of FP32 layer outputs and
        quantized layer outputs is minimized based upon the calibration dataset.
    calib_data : DataIter
        A data iterator initialized by the calibration dataset.
    num_calib_examples : int or None
        The maximum number of examples that user would like to use for calibration. If not provided,
        the whole calibration dataset will be used.
    quantized_dtype : str
        The quantized destination type for input data. Currently support 'int8', 'uint8' and 'auto'.
        'auto' means automatically select output type according to calibration result.
        Default value is 'int8'.
    quantize_mode : str
        The mode that quantization pass to apply. Support 'full' and 'smart'.
        'full' means quantize all operator if possible.
        'smart' means quantization pass will smartly choice which operator should be quantized.
    quantize_granularity: str
        The granularity of quantization, currently supports 'tensor-wise' and 'channel-wise'
        quantization. The default value is 'tensor-wise'.
    logger : Object
        A logging object for printing information during the process of quantization.

    Returns
    -------
    tuple
        A tuple of quantized symbol, quantized arg_params, and aux_params.
    """
    # --- validate user-supplied arguments ---
    if excluded_sym_names is None:
        excluded_sym_names = []
    if not isinstance(excluded_sym_names, list):
        raise ValueError('excluded_sym_names must be a list of strings representing'
                         ' the names of the symbols that will not be quantized,'
                         ' while received type %s' % str(type(excluded_sym_names)))
    if excluded_op_names is None:
        excluded_op_names = []
    if not isinstance(excluded_op_names, list):
        raise ValueError('excluded_op_names must be a list of strings representing'
                         ' the names of the operators that will not be quantized,'
                         ' while received type %s' % str(type(excluded_op_names)))
    if logger:
        os.environ['MXNET_QUANTIZATION_VERBOSE'] = '1'
        logger.info('Quantizing symbol')
    if quantized_dtype not in ('int8', 'uint8', 'auto'):
        raise ValueError('unknown quantized_dtype %s received,'
                         ' expected `int8`, `uint8` or `auto`' % quantized_dtype)
    if quantize_granularity not in ('tensor-wise', 'channel-wise'):
        # BUGFIX: error message previously read 'unkonwn'.
        raise ValueError('unknown quantize_granularity %s received,'
                         ' expected `tensor-wise` or `channel-wise`.' % quantize_granularity)
    # Quantize the graph; arg_params are all quantized offline.
    qsym, calib_layer = _quantize_symbol(sym, ctx, excluded_symbols=excluded_sym_names,
                                         excluded_operators=excluded_op_names,
                                         offline_params=list(arg_params.keys()),
                                         quantized_dtype=quantized_dtype,
                                         quantize_mode=quantize_mode,
                                         quantize_granularity=quantize_granularity)
    th_dict = {}
    if calib_mode is not None and calib_mode != 'none':
        if not isinstance(ctx, Context):
            raise ValueError('currently only supports single ctx, while received %s' % str(ctx))
        if calib_data is None:
            raise ValueError('calib_data must be provided when calib_mode=%s' % calib_mode)
        if not isinstance(calib_data, DataIter):
            raise ValueError('calib_data must be of DataIter type when calib_mode=%s,'
                             ' while received type %s' % (calib_mode, str(type(calib_data))))
        # Bind the FP32 module and run forward passes on the calibration data
        # to collect per-layer statistics.
        mod = Module(symbol=sym, data_names=data_names, label_names=label_names, context=ctx)
        if len(calib_data.provide_label) > 0:
            mod.bind(for_training=False, data_shapes=calib_data.provide_data,
                     label_shapes=calib_data.provide_label)
        else:
            mod.bind(for_training=False, data_shapes=calib_data.provide_data)
        mod.set_params(arg_params, aux_params)
        if calib_mode == 'entropy':
            hist_dict, num_examples = _collect_layer_histogram(mod, calib_data,
                                                               include_layer=calib_layer,
                                                               max_num_examples=num_calib_examples,
                                                               logger=logger)
            if logger:
                logger.info('Collected layer outputs from FP32 model using %d examples' % num_examples)
                logger.info('Calculating optimal thresholds for quantization')
            th_dict = _get_optimal_thresholds(hist_dict, quantized_dtype, logger=logger)
        elif calib_mode == 'naive':
            th_dict, num_examples = _collect_layer_output_min_max(
                mod, calib_data, quantized_dtype, include_layer=calib_layer, max_num_examples=num_calib_examples,
                logger=logger)
            if logger:
                logger.info('Collected layer output min/max values from FP32 model using %d examples'
                            % num_examples)
        else:
            raise ValueError('unknown calibration mode %s received,'
                             ' expected `none`, `naive`, or `entropy`' % calib_mode)
        # Bake the collected thresholds into the requantize nodes.
        qsym = _calibrate_quantized_sym(qsym, th_dict)
    if logger:
        logger.info('Quantizing parameters')
    qarg_params = _quantize_params(qsym, arg_params, th_dict)
    return qsym, qarg_params, aux_params
def quantize_model_mkldnn(sym, arg_params, aux_params,
                          data_names=('data',), label_names=('softmax_label',),
                          ctx=cpu(), excluded_sym_names=None, excluded_op_names=None,
                          calib_mode='entropy', calib_data=None, num_calib_examples=None,
                          quantized_dtype='int8', quantize_mode='smart',
                          quantize_granularity='tensor-wise', logger=None):
    """User-level API for generating a fusion + quantized model from a FP32 model
    w/ or w/o calibration with Intel MKL-DNN.
    The backend quantized operators are only enabled for Linux systems. Please do not run
    inference using the quantized models on Windows for now.

    Parameters
    ----------
    same with quantize_model

    Returns
    -------
    tuple
        A tuple of quantized symbol, quantized arg_params, and aux_params.
    """
    if not isinstance(ctx, Context):
        raise ValueError('currently only supports single ctx, while received %s' % str(ctx))
    if ctx.device_type != 'cpu':
        raise ValueError(
            'quantize_model_mkldnn only support Intel cpu platform with MKL-DNN Backend')
    # Fuse the FP32 graph for the MKL-DNN backend before quantizing ...
    fused_sym = sym.get_backend_symbol('MKLDNN_QUANTIZE')
    qsym, qarg_params, aux_params = quantize_model(sym=fused_sym, arg_params=arg_params,
                                                   aux_params=aux_params,
                                                   data_names=data_names, label_names=label_names,
                                                   ctx=ctx, excluded_sym_names=excluded_sym_names,
                                                   excluded_op_names=excluded_op_names,
                                                   calib_mode=calib_mode, calib_data=calib_data,
                                                   num_calib_examples=num_calib_examples,
                                                   quantized_dtype=quantized_dtype, quantize_mode=quantize_mode,
                                                   quantize_granularity=quantize_granularity, logger=logger)
    # ... and fuse the resulting INT8 graph as well.
    qsym = qsym.get_backend_symbol('MKLDNN_QUANTIZE')
    return qsym, qarg_params, aux_params
def quantize_graph(sym, arg_params, aux_params, ctx=cpu(),
                   excluded_sym_names=None, excluded_op_names=None,
                   calib_mode='entropy', quantized_dtype='int8',
                   quantize_mode='full', quantize_granularity='tensor-wise',
                   LayerOutputCollector=None, logger=None):
    """User-level API for generating a quantized model from a FP32 model w/o calibration
    and a collector for naive or entropy calibration.
    The backend quantized operators are only enabled for Linux systems. Please do not run
    inference using the quantized models on Windows for now.

    Parameters
    ----------
    sym : str or Symbol
        Defines the structure of a neural network for FP32 data types.
    ctx : Context
        Defines the device that users want to run forward propagation on the calibration
        dataset for collecting layer output statistics. Currently, only supports single context.
    arg_params : dict
        Dictionary of name to `NDArray`.
    aux_params : dict
        Dictionary of name to `NDArray`.
    excluded_sym_names : list of strings
        A list of strings representing the names of the symbols that users want to excluding
        from being quantized.
    excluded_op_names : list of strings
        A list of strings representing the names of the operators that users want to excluding
        from being quantized.
    calib_mode : str
        If calib_mode='none', no calibration will be used and the thresholds for
        requantization after the corresponding layers will be calculated at runtime by
        calling min and max operators. The quantized models generated in this
        mode are normally 10-20% slower than those with calibrations during inference.
        If calib_mode='naive', the min and max values of the layer outputs from a calibration
        dataset will be directly taken as the thresholds for quantization.
        If calib_mode='entropy' (default mode), the thresholds for quantization will be
        derived such that the KL divergence between the distributions of FP32 layer outputs and
        quantized layer outputs is minimized based upon the calibration dataset.
    quantized_dtype : str
        The quantized destination type for input data. Currently support 'int8'
        , 'uint8' and 'auto'. 'auto' means automatically select output type according to calibration result.
        Default value is 'int8'.
    quantize_mode : str
        The mode that quantization pass to apply. Support 'full' and 'smart'.
        'full' means quantize all operator if possible.
        'smart' means quantization pass will smartly choice which operator should be quantized.
    quantize_granularity: str
        The granularity of quantization, currently supports 'tensor-wise' and 'channel-wise'
        quantization. The default value is 'tensor-wise'.
    LayerOutputCollector : class
        For customize calibration method usage.
    logger : Object
        A logging object for printing information during the process of quantization.

    Returns
    -------
    tuple
        A tuple of quantized symbol, quantized arg_params, aux_params and collector.
    """
    # --- validate user-supplied arguments ---
    if excluded_sym_names is None:
        excluded_sym_names = []
    if not isinstance(excluded_sym_names, list):
        raise ValueError('excluded_sym_names must be a list of strings representing'
                         ' the names of the symbols that will not be quantized,'
                         ' while received type %s' % str(type(excluded_sym_names)))
    if not isinstance(ctx, Context):
        raise ValueError('currently only supports single ctx, while received %s' % str(ctx))
    if logger:
        os.environ['MXNET_QUANTIZATION_VERBOSE'] = '1'
        logger.info('Quantizing graph')
    if quantized_dtype not in ('int8', 'uint8', 'auto'):
        raise ValueError('unknown quantized_dtype %s received,'
                         ' expected `int8`, `uint8` or `auto`' % quantized_dtype)
    if quantize_granularity not in ('tensor-wise', 'channel-wise'):
        # BUGFIX: error message previously read 'unkonwn'.
        raise ValueError('unknown quantize_granularity %s received,'
                         ' expected `tensor-wise` or `channel-wise`.' % quantize_granularity)
    # Quantize the graph; arg_params are all quantized offline.
    qsym, calib_layer = _quantize_symbol(sym, ctx, excluded_symbols=excluded_sym_names,
                                         excluded_operators=excluded_op_names,
                                         offline_params=list(arg_params.keys()),
                                         quantized_dtype=quantized_dtype,
                                         quantize_mode=quantize_mode,
                                         quantize_granularity=quantize_granularity)
    th_dict = {}
    collector = None
    if calib_mode is not None and calib_mode != 'none':
        if calib_mode == 'entropy':
            collector = _LayerHistogramCollector(
                include_layer=calib_layer, logger=logger)
            if logger:
                logger.info(
                    'Create a layer output collector for entropy calibration.')
        elif calib_mode == 'naive':
            collector = _LayerOutputMinMaxCollector(quantized_dtype=quantized_dtype,
                                                    include_layer=calib_layer, logger=logger)
            if logger:
                logger.info(
                    'Create a layer output minmax collector for naive calibration')
        elif calib_mode == 'customize' and LayerOutputCollector is not None:
            collector = LayerOutputCollector
            if logger:
                logger.info(
                    'Create a customize layer output minmax collector for calibration')
        else:
            raise ValueError('unknown calibration mode %s received,'
                             ' expected `none`, `naive`, `entropy` or `customize`' % calib_mode)
        if logger:
            logger.info('Collector created, please use set_monitor_callback'
                        ' to collect calibration information.')
    if logger:
        logger.info('Quantizing parameters')
    # th_dict is empty at this point: thresholds are written into the graph
    # later by calib_graph() once the collector has been run.
    qarg_params = _quantize_params(qsym, arg_params, th_dict)
    return qsym, qarg_params, aux_params, collector
def calib_graph(qsym, arg_params, aux_params, collector,
                calib_mode='entropy', quantized_dtype='int8', logger=logging):
    """User-level API for calibrating a quantized model using a filled collector.
    The backend quantized operators are only enabled for Linux systems. Please do not run
    inference using the quantized models on Windows for now.

    Parameters
    ----------
    qsym : str or Symbol
        Defines the structure of a neural network for INT8 data types.
    arg_params : dict
        Dictionary of name to `NDArray`.
    aux_params : dict
        Dictionary of name to `NDArray`.
    collector : function
        layer collector for naive or entropy calibration.
    calib_mode : str
        If calib_mode='naive', the min and max values of the layer outputs from a calibration
        dataset will be directly taken as the thresholds for quantization.
        If calib_mode='entropy' (default mode), the thresholds for quantization will be
        derived such that the KL divergence between the distributions of FP32 layer outputs and
        quantized layer outputs is minimized based upon the calibration dataset.
        'none' is rejected: this function requires collected calibration data.
    quantized_dtype : str
        The quantized destination type for input data. Currently support 'int8'
        , 'uint8' and 'auto'. 'auto' means automatically select output type according to calibration result.
        Default value is 'int8'.
    logger : Object
        A logging object for printing information during the process of quantization.

    Returns
    -------
    tuple
        A tuple of calibrated symbol, quantized arg_params, aux_params.
    """
    # Calibration data is mandatory here, unlike quantize_model().
    if calib_mode is None or calib_mode == 'none':
        raise ValueError('please set calibration mode to naive or entropy.')
    if calib_mode == 'entropy':
        if logger:
            logger.info('Calculating optimal thresholds for quantization')
        th_dict = _get_optimal_thresholds(
            collector.hist_dict, quantized_dtype, logger=logger)
    elif calib_mode in ('naive', 'customize'):
        # Both modes take the collector's recorded min/max directly.
        th_dict = collector.min_max_dict
    else:
        raise ValueError('unknown calibration mode %s received,'
                         ' expected `none`, `naive`, `entropy` or `customize`' % calib_mode)
    qsym = _calibrate_quantized_sym(qsym, th_dict)
    if logger:
        logger.info('Quantizing parameters')
    qarg_params = _quantize_params(qsym, arg_params, th_dict)
    return qsym, qarg_params, aux_params
def quantize_net_v2(network, quantized_dtype='auto', quantize_mode='full', quantize_granularity='tensor-wise',
                    exclude_layers=None, exclude_layers_match=None, exclude_operators=None,
                    calib_data=None, data_shapes=None, calib_mode='none',
                    num_calib_examples=None, ctx=cpu(), LayerOutputCollector=None, logger=None):
    """User-level API for Gluon users to generate a quantized SymbolBlock from a FP32 HybridBlock w/ or w/o calibration.

    The backend quantized operators are only enabled for Linux systems. Please do not run
    inference using the quantized models on Windows for now.

    Parameters
    ----------
    network : Gluon HybridBlock
        Defines the structure of a neural network for FP32 data types.
    quantized_dtype : str
        The quantized destination type for input data. Currently support 'int8'
        , 'uint8' and 'auto'. 'auto' means automatically select output type according to calibration result.
        Default value is 'int8'.
    quantize_mode : str
        The mode that quantization pass to apply. Support 'full' and 'smart'.
        'full' means quantize all operator if possible.
        'smart' means quantization pass will smartly choice which operator should be quantized.
    quantize_granularity: str
        The granularity of quantization, currently supports 'tensor-wise' and 'channel-wise'
        quantization. The default value is 'tensor-wise'.
    exclude_layers : list of strings
        A list of strings representing the names of the symbols that users want to excluding
    exclude_layers_match : list of strings
        A list of strings wildcard matching the names of the symbols that users want to excluding
        from being quantized.
    exclude_operators : list of strings
        A list of strings representing the names of the operators that users want to excluding
    calib_data : mx.io.DataIter or gluon.DataLoader
        A iterable data loading object.
    data_shapes : list
        List of DataDesc, required if calib_data is not provided
    calib_mode : str
        If calib_mode='none', no calibration will be used and the thresholds for
        requantization after the corresponding layers will be calculated at runtime by
        calling min and max operators. The quantized models generated in this
        mode are normally 10-20% slower than those with calibrations during inference.
        If calib_mode='naive', the min and max values of the layer outputs from a calibration
        dataset will be directly taken as the thresholds for quantization.
        If calib_mode='entropy' (default mode), the thresholds for quantization will be
        derived such that the KL divergence between the distributions of FP32 layer outputs and
        quantized layer outputs is minimized based upon the calibration dataset.
    num_calib_examples : int or None
        The maximum number of examples that user would like to use for calibration. If not provided,
        the whole calibration dataset will be used.
    ctx : Context
        Defines the device that users want to run forward propagation on the calibration
        dataset for collecting layer output statistics. Currently, only supports single context.
    LayerOutputCollector : class
        For customize calibration method usage.
    logger : Object
        A logging object for printing information during the process of quantization.

    Returns
    -------
    network : Gluon SymbolBlock
        Defines the structure of a neural network for INT8 data types.
    """
    if logger:
        logger.info('Export HybridBlock')
    network.hybridize()
    # Local import — presumably avoids a circular import at module load; confirm.
    import mxnet as mx
    if calib_data is not None:
        if isinstance(calib_data, DataIter):
            dshapes = calib_data.provide_data
        else:
            # Wrap a gluon DataLoader into a DataIter and recover its shapes.
            calib_data, dshapes = _as_data_iter(calib_data)
        if not data_shapes:
            data_shapes = dshapes
    if not data_shapes:
        raise ValueError('data_shapes required')
    # Build zero-filled dummy inputs so the hybridized network can be traced.
    data_nd = []
    for shape in data_shapes:
        data_nd.append(mx.nd.zeros(shape.shape))
    # Drop trailing dummy inputs (and the matching provide_data entries) until
    # the network accepts the argument count — presumably handles networks that
    # take fewer inputs than the iterator provides; TODO confirm.
    while True:
        try:
            network(*data_nd)
        except TypeError:
            del data_nd[-1]
            del calib_data.provide_data[-1]
            continue
        else:
            break

    import tempfile
    try:
        from tempfile import TemporaryDirectory
    except ImportError:
        # really simple implementation of TemporaryDirectory (Python 2 fallback)
        class TemporaryDirectory(object):
            def __init__(self, suffix='', prefix='', dir=''):
                self._dirname = tempfile.mkdtemp(suffix, prefix, dir)

            def __enter__(self):
                return self._dirname

            def __exit__(self, exc_type, exc_value, traceback):
                shutil.rmtree(self._dirname)

    # TODO(xinyu-intel): tmp solution to save and reload for mxnet.mod.Module.
    # will enhance `export` function to return `sym, args, auxs` directly.
    with TemporaryDirectory() as tmpdirname:
        prefix = os.path.join(tmpdirname, 'tmp')
        network.export(prefix, epoch=0)
        symnet, args, auxs = mx.model.load_checkpoint(prefix, 0)

    # Normalize exclusion lists and expand wildcard matches into concrete names.
    if exclude_layers is None:
        exclude_layers = []
    if exclude_layers_match is None:
        exclude_layers_match = []
    if exclude_operators is None:
        exclude_operators = []
    for name_match in exclude_layers_match:
        for layers in list(symnet.get_internals()):
            if layers.name.find(name_match) != -1:
                exclude_layers.append(layers.name)
    if logger:
        logger.info('These layers have been excluded %s' % exclude_layers)

    # On CPU, run the graph through the MKLDNN_QUANTIZE backend pass first.
    if ctx == mx.cpu():
        symnet = symnet.get_backend_symbol('MKLDNN_QUANTIZE')

    qsym, qarg_params, aux_params, collector = quantize_graph(
        sym=symnet, arg_params=args, aux_params=auxs, ctx=ctx,
        excluded_sym_names=exclude_layers, excluded_op_names=exclude_operators,
        calib_mode=calib_mode, quantized_dtype=quantized_dtype, quantize_mode=quantize_mode,
        quantize_granularity=quantize_granularity, LayerOutputCollector=LayerOutputCollector,
        logger=logger)

    if calib_mode is not None and calib_mode != 'none':
        if not isinstance(ctx, Context):
            raise ValueError(
                'currently only supports single ctx, while received %s' % str(ctx))
        if calib_data is None:
            raise ValueError(
                'calib_data must be provided when calib_mode=%s' % calib_mode)
        if calib_mode in ['naive', 'entropy', 'customize']:
            data_names = [pair[0] for pair in calib_data.provide_data]
            # Run the FP32 graph over the calibration data to collect layer
            # statistics, then fold them into the quantized graph.
            mod = Module(symbol=symnet, context=ctx,
                         data_names=data_names, label_names=None)
            mod.bind(for_training=False, data_shapes=data_shapes)
            mod.set_params(args, auxs, allow_missing=False, force_init=True)
            num_examples = _collect_layer_statistics(mod, calib_data, collector,
                                                     num_calib_examples, logger)
            if logger:
                logger.info('Collected layer output values from FP32 model using %d examples'
                            % num_examples)
            qsym, qarg_params, aux_params = calib_graph(
                qsym=qsym, arg_params=args, aux_params=auxs, collector=collector,
                calib_mode=calib_mode, quantized_dtype=quantized_dtype, logger=logger)
        else:
            raise ValueError(
                'please set calibration mode to naive or entropy.')
    elif calib_mode is not None and calib_mode == 'none':
        # No calibration: input names come straight from the provided shapes.
        data_names = [pair[0] for pair in data_shapes]

    if ctx == mx.cpu():
        qsym = qsym.get_backend_symbol('MKLDNN_QUANTIZE')

    from ..gluon import SymbolBlock
    data_sym = []
    for name in data_names:
        data_sym.append(mx.sym.var(name))
    net = SymbolBlock(qsym, data_sym)
    # TODO(xinyu-intel): tmp solution to save param_dict and reload for SymbolBlock
    # will enhance SymbolBlock to load args, auxs directly.
    with TemporaryDirectory() as tmpdirname:
        prefix = os.path.join(tmpdirname, 'tmp')
        param_name = '%s-%04d.params' % (prefix + 'net-quantized', 0)
        save_dict = {('arg:%s' % k): v.as_in_context(cpu())
                     for k, v in qarg_params.items()}
        save_dict.update({('aux:%s' % k): v.as_in_context(cpu())
                          for k, v in aux_params.items()})
        nd_save(param_name, save_dict)
        net.collect_params().load(param_name, cast_dtype=True, dtype_source='saved')
        net.collect_params().reset_ctx(ctx)
    return net
def quantize_net(network, quantized_dtype='auto', quantize_mode='full',
                 exclude_layers=None, exclude_layers_match=None, exclude_operators=None,
                 calib_data=None, data_shapes=None, calib_mode='none',
                 num_calib_examples=None, ctx=cpu(), logger=None):
    """User-level API for Gluon users to generate a quantized SymbolBlock from a FP32 HybridBlock w/ or w/o calibration.
    Will be deprecated after MXNet 2.0, please use quantize_net_v2.
    """
    warnings.warn('WARNING: This will be deprecated after MXNet 2.0, please use quantize_net_v2.')
    # Collect the forwarded configuration in one place; this wrapper only pins
    # quantize_granularity and LayerOutputCollector to their legacy values.
    forwarded = dict(network=network,
                     quantized_dtype=quantized_dtype,
                     quantize_mode=quantize_mode,
                     quantize_granularity='tensor-wise',
                     exclude_layers=exclude_layers,
                     exclude_layers_match=exclude_layers_match,
                     exclude_operators=exclude_operators,
                     calib_data=calib_data,
                     data_shapes=data_shapes,
                     calib_mode=calib_mode,
                     num_calib_examples=num_calib_examples,
                     ctx=ctx,
                     LayerOutputCollector=None,
                     logger=logger)
    return quantize_net_v2(**forwarded)
|
PypiClean
|
/tensorflow-2.1.1-cp36-cp36m-macosx_10_11_x86_64.whl/tensorflow_core/python/layers/normalization.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras.layers import normalization as keras_normalization
from tensorflow.python.layers import base
from tensorflow.python.ops import init_ops
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
@tf_export(v1=['layers.BatchNormalization'])
class BatchNormalization(keras_normalization.BatchNormalization, base.Layer):
  """Batch Normalization layer from http://arxiv.org/abs/1502.03167.

  "Batch Normalization: Accelerating Deep Network Training by Reducing
  Internal Covariate Shift"
  Sergey Ioffe, Christian Szegedy

  Keras APIs handle BatchNormalization updates to the moving_mean and
  moving_variance as part of their `fit()` and `evaluate()` loops. However, if a
  custom training loop is used with an instance of `Model`, these updates need
  to be explicitly included. Here's a simple example of how it can be done:

  ```python
  # model is an instance of Model that contains BatchNormalization layer.
  update_ops = model.get_updates_for(None) + model.get_updates_for(features)
  train_op = optimizer.minimize(loss)
  train_op = tf.group([train_op, update_ops])
  ```

  Arguments:
    axis: An `int` or list of `int`, the axis or axes that should be
      normalized, typically the features axis/axes. For instance, after a
      `Conv2D` layer with `data_format="channels_first"`, set `axis=1`. If a
      list of axes is provided, each axis in `axis` will be normalized
      simultaneously. Default is `-1` which uses the last axis. Note: when
      using multi-axis batch norm, the `beta`, `gamma`, `moving_mean`, and
      `moving_variance` variables are the same rank as the input Tensor, with
      dimension size 1 in all reduced (non-axis) dimensions).
    momentum: Momentum for the moving average.
    epsilon: Small float added to variance to avoid dividing by zero.
    center: If True, add offset of `beta` to normalized tensor. If False, `beta`
      is ignored.
    scale: If True, multiply by `gamma`. If False, `gamma` is
      not used. When the next layer is linear (also e.g. `nn.relu`), this can be
      disabled since the scaling can be done by the next layer.
    beta_initializer: Initializer for the beta weight.
    gamma_initializer: Initializer for the gamma weight.
    moving_mean_initializer: Initializer for the moving mean.
    moving_variance_initializer: Initializer for the moving variance.
    beta_regularizer: Optional regularizer for the beta weight.
    gamma_regularizer: Optional regularizer for the gamma weight.
    beta_constraint: An optional projection function to be applied to the `beta`
      weight after being updated by an `Optimizer` (e.g. used to implement
      norm constraints or value constraints for layer weights). The function
      must take as input the unprojected variable and must return the
      projected variable (which must have the same shape). Constraints are
      not safe to use when doing asynchronous distributed training.
    gamma_constraint: An optional projection function to be applied to the
      `gamma` weight after being updated by an `Optimizer`.
    renorm: Whether to use Batch Renormalization
      (https://arxiv.org/abs/1702.03275). This adds extra variables during
      training. The inference is the same for either value of this parameter.
    renorm_clipping: A dictionary that may map keys 'rmax', 'rmin', 'dmax' to
      scalar `Tensors` used to clip the renorm correction. The correction
      `(r, d)` is used as `corrected_value = normalized_value * r + d`, with
      `r` clipped to [rmin, rmax], and `d` to [-dmax, dmax]. Missing rmax, rmin,
      dmax are set to inf, 0, inf, respectively.
    renorm_momentum: Momentum used to update the moving means and standard
      deviations with renorm. Unlike `momentum`, this affects training
      and should be neither too small (which would add noise) nor too large
      (which would give stale estimates). Note that `momentum` is still applied
      to get the means and variances for inference.
    fused: if `None` or `True`, use a faster, fused implementation if possible.
      If `False`, use the system recommended implementation.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    virtual_batch_size: An `int`. By default, `virtual_batch_size` is `None`,
      which means batch normalization is performed across the whole batch. When
      `virtual_batch_size` is not `None`, instead perform "Ghost Batch
      Normalization", which creates virtual sub-batches which are each
      normalized separately (with shared gamma, beta, and moving statistics).
      Must divide the actual batch size during execution.
    adjustment: A function taking the `Tensor` containing the (dynamic) shape of
      the input tensor and returning a pair (scale, bias) to apply to the
      normalized values (before gamma and beta), only during training. For
      example, if axis==-1,
        `adjustment = lambda shape: (
          tf.random.uniform(shape[-1:], 0.93, 1.07),
          tf.random.uniform(shape[-1:], -0.1, 0.1))`
      will scale the normalized value by up to 7% up or down, then shift the
      result by up to 0.1 (with independent scaling and bias for each feature
      but shared across all examples), and finally apply gamma and/or beta. If
      `None`, no adjustment is applied. Cannot be specified if
      virtual_batch_size is specified.
    name: A string, the name of the layer.
  """

  # Thin v1-compat shim: every constructor argument is forwarded verbatim to
  # the Keras implementation; this class adds no behavior of its own beyond
  # mixing in the legacy `base.Layer`.
  def __init__(self,
               axis=-1,
               momentum=0.99,
               epsilon=1e-3,
               center=True,
               scale=True,
               beta_initializer=init_ops.zeros_initializer(),
               gamma_initializer=init_ops.ones_initializer(),
               moving_mean_initializer=init_ops.zeros_initializer(),
               moving_variance_initializer=init_ops.ones_initializer(),
               beta_regularizer=None,
               gamma_regularizer=None,
               beta_constraint=None,
               gamma_constraint=None,
               renorm=False,
               renorm_clipping=None,
               renorm_momentum=0.99,
               fused=None,
               trainable=True,
               virtual_batch_size=None,
               adjustment=None,
               name=None,
               **kwargs):
    super(BatchNormalization, self).__init__(
        axis=axis,
        momentum=momentum,
        epsilon=epsilon,
        center=center,
        scale=scale,
        beta_initializer=beta_initializer,
        gamma_initializer=gamma_initializer,
        moving_mean_initializer=moving_mean_initializer,
        moving_variance_initializer=moving_variance_initializer,
        beta_regularizer=beta_regularizer,
        gamma_regularizer=gamma_regularizer,
        beta_constraint=beta_constraint,
        gamma_constraint=gamma_constraint,
        renorm=renorm,
        renorm_clipping=renorm_clipping,
        renorm_momentum=renorm_momentum,
        fused=fused,
        trainable=trainable,
        virtual_batch_size=virtual_batch_size,
        adjustment=adjustment,
        name=name,
        **kwargs)

  def call(self, inputs, training=False):
    # Pins the `training` default to False for this v1 layer before
    # delegating — NOTE(review): presumably differs from the Keras parent's
    # default; confirm against keras_normalization.BatchNormalization.call.
    return super(BatchNormalization, self).call(inputs, training=training)
@deprecation.deprecated(
    date=None, instructions='Use keras.layers.BatchNormalization instead. In '
    'particular, `tf.control_dependencies(tf.GraphKeys.UPDATE_OPS)` should not '
    'be used (consult the `tf.keras.layers.BatchNormalization` '
    'documentation).')
@tf_export(v1=['layers.batch_normalization'])
def batch_normalization(inputs,
                        axis=-1,
                        momentum=0.99,
                        epsilon=1e-3,
                        center=True,
                        scale=True,
                        beta_initializer=init_ops.zeros_initializer(),
                        gamma_initializer=init_ops.ones_initializer(),
                        moving_mean_initializer=init_ops.zeros_initializer(),
                        moving_variance_initializer=init_ops.ones_initializer(),
                        beta_regularizer=None,
                        gamma_regularizer=None,
                        beta_constraint=None,
                        gamma_constraint=None,
                        training=False,
                        trainable=True,
                        name=None,
                        reuse=None,
                        renorm=False,
                        renorm_clipping=None,
                        renorm_momentum=0.99,
                        fused=None,
                        virtual_batch_size=None,
                        adjustment=None):
  """Functional interface for the batch normalization layer.

  Reference: http://arxiv.org/abs/1502.03167 — "Batch Normalization:
  Accelerating Deep Network Training by Reducing Internal Covariate Shift",
  Sergey Ioffe, Christian Szegedy.

  Note: when training, the moving_mean and moving_variance need to be updated.
  By default the update ops are placed in `tf.GraphKeys.UPDATE_OPS`, so they
  need to be executed alongside the `train_op`. Also, be sure to add any
  batch_normalization ops before getting the update_ops collection. Otherwise,
  update_ops will be empty, and training/inference will not work properly. For
  example:

  ```python
  x_norm = tf.compat.v1.layers.batch_normalization(x, training=training)
  # ...
  update_ops = tf.compat.v1.get_collection(tf.GraphKeys.UPDATE_OPS)
  train_op = optimizer.minimize(loss)
  train_op = tf.group([train_op, update_ops])
  ```

  Arguments:
    inputs: Tensor input.
    axis: An `int`, the axis that should be normalized (typically the features
      axis). For instance, after a `Convolution2D` layer with
      `data_format="channels_first"`, set `axis=1`.
    momentum: Momentum for the moving average.
    epsilon: Small float added to variance to avoid dividing by zero.
    center: If True, add offset of `beta` to normalized tensor; if False,
      `beta` is ignored.
    scale: If True, multiply by `gamma`; if False, `gamma` is not used. When
      the next layer is linear (also e.g. `nn.relu`), this can be disabled
      since the scaling can be done by the next layer.
    beta_initializer: Initializer for the beta weight.
    gamma_initializer: Initializer for the gamma weight.
    moving_mean_initializer: Initializer for the moving mean.
    moving_variance_initializer: Initializer for the moving variance.
    beta_regularizer: Optional regularizer for the beta weight.
    gamma_regularizer: Optional regularizer for the gamma weight.
    beta_constraint: Optional projection function applied to the `beta` weight
      after being updated by an `Optimizer`. Must map the unprojected variable
      to a projected variable of the same shape; not safe with asynchronous
      distributed training.
    gamma_constraint: Optional projection function applied to the `gamma`
      weight after being updated by an `Optimizer`.
    training: Either a Python boolean, or a TensorFlow boolean scalar tensor
      (e.g. a placeholder). Whether to return the output in training mode
      (normalized with statistics of the current batch) or in inference mode
      (normalized with moving statistics). **NOTE**: set this correctly or
      training/inference will not work properly.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    name: String, the name of the layer.
    reuse: Boolean, whether to reuse the weights of a previous layer by the
      same name.
    renorm: Whether to use Batch Renormalization
      (https://arxiv.org/abs/1702.03275). Adds extra variables during
      training; inference is unchanged.
    renorm_clipping: A dictionary that may map keys 'rmax', 'rmin', 'dmax' to
      scalar `Tensors` used to clip the renorm correction. The correction
      `(r, d)` is used as `corrected_value = normalized_value * r + d`, with
      `r` clipped to [rmin, rmax], and `d` to [-dmax, dmax]. Missing rmax,
      rmin, dmax default to inf, 0, inf, respectively.
    renorm_momentum: Momentum used to update the moving means and standard
      deviations with renorm. Unlike `momentum`, this affects training and
      should be neither too small (adds noise) nor too large (stale
      estimates). `momentum` is still applied to get the inference statistics.
    fused: if `None` or `True`, use a faster, fused implementation if
      possible. If `False`, use the system recommended implementation.
    virtual_batch_size: An `int`. When not `None`, perform "Ghost Batch
      Normalization": virtual sub-batches normalized separately (with shared
      gamma, beta, and moving statistics). Must divide the actual batch size
      during execution. Default `None` normalizes across the whole batch.
    adjustment: A function taking the `Tensor` containing the (dynamic) shape
      of the input tensor and returning a pair (scale, bias) applied to the
      normalized values (before gamma and beta), only during training. For
      example, if axis==-1,
        `adjustment = lambda shape: (
          tf.random.uniform(shape[-1:], 0.93, 1.07),
          tf.random.uniform(shape[-1:], -0.1, 0.1))`
      scales the normalized value by up to 7% up or down, then shifts by up to
      0.1 (independent per feature, shared across examples), before gamma and
      beta. If `None`, no adjustment is applied. Cannot be specified together
      with virtual_batch_size.

  Returns:
    Output tensor.

  Raises:
    ValueError: if eager execution is enabled.
  """
  # Everything except `inputs` and `training` configures the layer object;
  # `reuse`/`name` map onto the legacy base-layer `_reuse`/`_scope` knobs.
  layer_kwargs = dict(
      axis=axis,
      momentum=momentum,
      epsilon=epsilon,
      center=center,
      scale=scale,
      beta_initializer=beta_initializer,
      gamma_initializer=gamma_initializer,
      moving_mean_initializer=moving_mean_initializer,
      moving_variance_initializer=moving_variance_initializer,
      beta_regularizer=beta_regularizer,
      gamma_regularizer=gamma_regularizer,
      beta_constraint=beta_constraint,
      gamma_constraint=gamma_constraint,
      renorm=renorm,
      renorm_clipping=renorm_clipping,
      renorm_momentum=renorm_momentum,
      fused=fused,
      trainable=trainable,
      virtual_batch_size=virtual_batch_size,
      adjustment=adjustment,
      name=name,
      _reuse=reuse,
      _scope=name)
  return BatchNormalization(**layer_kwargs).apply(inputs, training=training)
# Aliases
# NOTE(review): module-level shorthand spellings — presumably kept for callers
# importing the short names; confirm before removing.
BatchNorm = BatchNormalization
batch_norm = batch_normalization
|
PypiClean
|
/tensorflow_ascend-1.15.0-cp37-cp37m-manylinux2014_aarch64.whl/tensorflow_core/python/ops/lookup_ops.py
|
"""Lookup operations."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import uuid
import six
from tensorflow.python.compat import compat as fwd_compat
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_lookup_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import string_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_lookup_ops import *
from tensorflow.python.training.saver import BaseSaverBuilder
# pylint: enable=wildcard-import
from tensorflow.python.training.tracking import base as trackable_base
from tensorflow.python.training.tracking import tracking as trackable
from tensorflow.python.util import compat
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.util.tf_export import tf_export
@tf_export(v1=["initialize_all_tables"])
@deprecated(None, "Use `tf.tables_initializer` instead.")
def initialize_all_tables(name="init_all_tables"):
  """Deprecated wrapper that delegates to `tables_initializer`.

  Args:
    name: Optional name for the initialization op.

  Returns:
    An Op that initializes all tables; a NoOp when the default graph has no
    tables registered.
  """
  return tables_initializer(name=name)
@tf_export(v1=["initializers.tables_initializer", "tables_initializer"])
def tables_initializer(name="init_all_tables"):
  """Returns an Op that initializes all tables of the default graph.

  See the [Low Level
  Intro](https://www.tensorflow.org/guide/low_level_intro#feature_columns)
  guide, for an example of usage.

  Args:
    name: Optional name for the initialization op.

  Returns:
    An Op grouping every op registered in the `TABLE_INITIALIZERS` collection;
    a NoOp when that collection is empty.
  """
  table_init_ops = ops.get_collection(ops.GraphKeys.TABLE_INITIALIZERS)
  if not table_init_ops:
    return control_flow_ops.no_op(name=name)
  return control_flow_ops.group(*table_init_ops, name=name)
def _check_table_dtypes(table, key_dtype, value_dtype):
"""Check that the given key_dtype and value_dtype matches the table dtypes.
Args:
table: The table to check types against to.
key_dtype: The key data type to check.
value_dtype: The value data type to check.
Raises:
TypeError: when 'key_dtype' or 'value_dtype' doesn't match the table data
types.
"""
if key_dtype.base_dtype != table.key_dtype:
raise TypeError("Invalid key dtype, expected %s but got %s." %
(table.key_dtype, key_dtype))
if value_dtype.base_dtype != table.value_dtype:
raise TypeError("Invalid value dtype, expected %s but got %s." %
(table.value_dtype, value_dtype))
class LookupInterface(trackable.TrackableResource):
  """Represent a lookup table that persists across different steps."""

  def __init__(self, key_dtype, value_dtype):
    """Construct a lookup table interface.

    Args:
      key_dtype: The table key type.
      value_dtype: The table value type.
    """
    self._key_dtype = dtypes.as_dtype(key_dtype)
    self._value_dtype = dtypes.as_dtype(value_dtype)
    super(LookupInterface, self).__init__()

  def _create_resource(self):
    """Creates the table resource handle. Subclasses must override."""
    raise NotImplementedError

  @property
  def key_dtype(self):
    """The table key dtype."""
    return self._key_dtype

  @property
  def value_dtype(self):
    """The table value dtype."""
    return self._value_dtype

  @property
  def name(self):
    """The name of the table. Subclasses must override."""
    # Fix: the original `return NotImplementedError` handed callers the
    # exception *class* (so e.g. "%s_Size" % self.name formatted it into op
    # names) instead of signaling "unimplemented" like size()/lookup() below.
    raise NotImplementedError

  def size(self, name=None):
    """Compute the number of elements in this table."""
    raise NotImplementedError

  def lookup(self, keys, name=None):
    """Looks up `keys` in a table, outputs the corresponding values."""
    raise NotImplementedError
class InitializableLookupTableBase(LookupInterface):
  """Initializable lookup table interface.

  An initializable lookup tables persist across different steps.
  """

  def __init__(self, default_value, initializer):
    """Construct a table object from a table reference.

    If requires a table initializer object (subclass of `TableInitializerBase`).
    It provides the table key and value types, as well as the op to initialize
    the table. The caller is responsible to execute the initialization op.

    Args:
      default_value: The value to use if a key is missing in the table.
      initializer: The table initializer to use.
    """
    super(InitializableLookupTableBase, self).__init__(initializer.key_dtype,
                                                       initializer.value_dtype)
    self._default_value = ops.convert_to_tensor(
        default_value, dtype=self._value_dtype)
    # The default value must be a scalar.
    self._default_value.get_shape().merge_with(tensor_shape.TensorShape([]))
    if isinstance(initializer, trackable_base.Trackable):
      self._initializer = self._track_trackable(initializer, "_initializer")
    # Create the resource outside any function/control-flow scope.
    with ops.init_scope():
      self._resource_handle = self._create_resource()
    # When building inside a graph-mode control-flow context, lift the
    # initialization op out of that context via init_scope; otherwise
    # initialize in place.
    if (not context.executing_eagerly() and
        ops.get_default_graph()._get_control_flow_context() is not None):  # pylint: disable=protected-access
      with ops.init_scope():
        self._init_op = self._initialize()
    else:
      self._init_op = self._initialize()

  def _initialize(self):
    # Delegates to the initializer object supplied at construction time.
    return self._initializer.initialize(self)

  @property
  def default_value(self):
    """The default value of the table."""
    return self._default_value

  def size(self, name=None):
    """Compute the number of elements in this table.

    Args:
      name: A name for the operation (optional).

    Returns:
      A scalar tensor containing the number of elements in this table.
    """
    with ops.name_scope(name, "%s_Size" % self.name, [self.resource_handle]):
      return gen_lookup_ops.lookup_table_size_v2(self.resource_handle)

  def lookup(self, keys, name=None):
    """Looks up `keys` in a table, outputs the corresponding values.

    The `default_value` is used for keys not present in the table.

    Args:
      keys: Keys to look up. May be either a `SparseTensor` or dense `Tensor`.
      name: A name for the operation (optional).

    Returns:
      A `SparseTensor` if keys are sparse, otherwise a dense `Tensor`.

    Raises:
      TypeError: when `keys` or `default_value` doesn't match the table data
        types.
    """
    # For sparse input, look up only the stored values; the sparsity pattern
    # is re-attached below.
    key_tensor = keys
    if isinstance(keys, sparse_tensor.SparseTensor):
      key_tensor = keys.values

    if keys.dtype.base_dtype != self._key_dtype:
      raise TypeError("Signature mismatch. Keys must be dtype %s, got %s." %
                      (self._key_dtype, keys.dtype))

    with ops.name_scope(
        name, "%s_Lookup" % self.name,
        (self.resource_handle, key_tensor, self._default_value)):
      values = gen_lookup_ops.lookup_table_find_v2(self.resource_handle,
                                                   key_tensor,
                                                   self._default_value)

    # The lookup preserves the key tensor's shape element-wise.
    values.set_shape(key_tensor.get_shape())
    if isinstance(keys, sparse_tensor.SparseTensor):
      return sparse_tensor.SparseTensor(keys.indices, values, keys.dense_shape)
    else:
      return values
class InitializableLookupTableBaseV1(InitializableLookupTableBase):
  """V1-compatible initializable table base exposing the `initializer` op."""

  @property
  def initializer(self):
    """The table-initialization op created in the base class constructor."""
    return self._init_op
@tf_export("lookup.StaticHashTable", v1=[])
class StaticHashTable(InitializableLookupTableBase):
  """A generic hash table that is immutable once initialized.

  Example usage:

  ```python
  keys_tensor = tf.constant([1, 2])
  vals_tensor = tf.constant([3, 4])
  input_tensor = tf.constant([1, 5])
  table = tf.lookup.StaticHashTable(
      tf.lookup.KeyValueTensorInitializer(keys_tensor, vals_tensor), -1)
  print(table.lookup(input_tensor))
  ```
  """

  def __init__(self, initializer, default_value, name=None):
    """Creates a non-initialized `HashTable` object.

    Creates a table, the type of its keys and values are specified by the
    initializer.
    Before using the table you will have to initialize it. After initialization
    the table will be immutable.

    Args:
      initializer: The table initializer to use. See `HashTable` kernel for
        supported key and value types.
      default_value: The value to use if a key is missing in the table.
      name: A name for the operation (optional).

    Returns:
      A `HashTable` object.
    """
    # NOTE: the fields below must be assigned before the super().__init__
    # call, which triggers _create_resource() and reads them.
    self._initializer = initializer
    self._default_value = default_value
    self._shared_name = self._initializer._shared_name  # pylint: disable=protected-access
    if not self._shared_name:
      # Force using a shared name so that StaticHashTable resources can be
      # shared across different kernels. If no "shared_name" is set and
      # "use_node_name_sharing" is False, then each kernel gets its own local
      # resource.
      self._shared_name = "hash_table_%s" % (str(uuid.uuid4()),)
    self._name = name or "hash_table"
    self._table_name = None
    super(StaticHashTable, self).__init__(default_value, initializer)
    self._value_shape = self._default_value.get_shape()

  def _create_resource(self):
    """Creates the hash-table resource op and records its graph-mode name."""
    table_ref = gen_lookup_ops.hash_table_v2(
        shared_name=self._shared_name,
        key_dtype=self._initializer.key_dtype,
        value_dtype=self._initializer.value_dtype,
        name=self._name)
    if context.executing_eagerly():
      # Eager ops have no stable graph name to record.
      self._table_name = None
    else:
      self._table_name = table_ref.op.name.split("/")[-1]
    return table_ref

  @property
  def name(self):
    # Set by _create_resource(); None under eager execution.
    return self._table_name

  def export(self, name=None):
    """Returns tensors of all keys and values in the table.

    Args:
      name: A name for the operation (optional).

    Returns:
      A pair of tensors with the first tensor containing all keys and the
      second tensors containing all values in the table.
    """
    with ops.name_scope(name, "%s_Export" % self.name, [self.resource_handle]):
      exported_keys, exported_values = gen_lookup_ops.lookup_table_export_v2(
          self.resource_handle, self._key_dtype, self._value_dtype)

    # Each exported value has the same shape as the scalar-or-vector default.
    exported_values.set_shape(exported_keys.get_shape().concatenate(
        self._value_shape))
    return exported_keys, exported_values
@tf_export(v1=["lookup.StaticHashTable"])
class StaticHashTableV1(StaticHashTable):
  """A generic hash table that is immutable once initialized.

  When running in graph mode, you must evaluate the tensor returned by
  `tf.tables_initializer()` before evaluating the tensor returned by
  this class's `lookup()` method. Example usage in graph mode:

  ```python
  keys_tensor = tf.constant([1, 2])
  vals_tensor = tf.constant([3, 4])
  input_tensor = tf.constant([1, 5])
  table = tf.lookup.StaticHashTable(
      tf.lookup.KeyValueTensorInitializer(keys_tensor, vals_tensor), -1)
  out = table.lookup(input_tensor)
  with tf.Session() as sess:
      sess.run(tf.tables_initializer())
      print(sess.run(out))
  ```

  In eager mode, no special code is needed to initialize the table.
  Example usage in eager mode:

  ```python
  tf.enable_eager_execution()
  keys_tensor = tf.constant([1, 2])
  vals_tensor = tf.constant([3, 4])
  input_tensor = tf.constant([1, 5])
  table = tf.lookup.StaticHashTable(
      tf.lookup.KeyValueTensorInitializer(keys_tensor, vals_tensor), -1)
  print(table.lookup(input_tensor))
  ```
  """

  @property
  def initializer(self):
    """The table-initialization op created by the base class constructor."""
    return self._init_op
# For backwards compatibility. This will be removed in TF 2.0.
class HashTable(StaticHashTableV1):
  """Alias of `StaticHashTableV1` kept only for backwards compatibility."""

  @property
  def init(self):
    """Legacy alias for `initializer`."""
    return self.initializer
class TableInitializerBase(trackable_base.Trackable):
  """Base class for lookup table initializers."""

  def __init__(self, key_dtype, value_dtype):
    """Creates an initializer with the given key/value dtypes.

    Args:
      key_dtype: Type of the table keys.
      value_dtype: Type of the table values.
    """
    # Normalize both dtypes up front so subclasses and callers can rely on
    # the properties returning `DType` instances.
    self._key_dtype = dtypes.as_dtype(key_dtype)
    self._value_dtype = dtypes.as_dtype(value_dtype)

  @property
  def key_dtype(self):
    """The expected table key dtype."""
    return self._key_dtype

  @property
  def value_dtype(self):
    """The expected table value dtype."""
    return self._value_dtype

  def initialize(self, table):
    """Returns the table initialization op. Subclasses must override."""
    raise NotImplementedError

  @property
  def _shared_name(self):
    """Returns a shared name to be used by the table."""
    # Under eager execution a unique suffix avoids spurious sharing issues;
    # in graph mode an empty shared name is used.
    # TODO(rohanj): Use context.shared_name() instead.
    return str(ops.uid()) if context.executing_eagerly() else ""
@tf_export("lookup.KeyValueTensorInitializer")
class KeyValueTensorInitializer(TableInitializerBase):
  """Table initializers given `keys` and `values` tensors."""

  def __init__(self, keys, values, key_dtype=None, value_dtype=None, name=None):
    """Constructs a table initializer object based on keys and values tensors.

    Args:
      keys: The tensor for the keys.
      values: The tensor for the values.
      key_dtype: The `keys` data type. Used when `keys` is a python array.
      value_dtype: The `values` data type. Used when `values` is a python array.
      name: A name for the operation (optional).
    """
    if (not context.executing_eagerly() and
        ops.get_default_graph()._get_control_flow_context() is not None):  # pylint: disable=protected-access
      # When constructed inside a control-flow context (e.g. a cond/while
      # body), lift the constant tensors out with init_scope so the
      # initializer remains usable outside that context.
      with ops.init_scope():
        self._keys = ops.convert_to_tensor(keys, dtype=key_dtype, name="keys")
        self._values = ops.convert_to_tensor(
            values, dtype=value_dtype, name="values")
    else:
      self._keys = ops.convert_to_tensor(keys, dtype=key_dtype, name="keys")
      self._values = ops.convert_to_tensor(
          values, dtype=value_dtype, name="values")
    self._name = name if name is not None else "key_value_init"
    if context.executing_eagerly():
      # Ensure a unique name when eager execution is enabled to avoid spurious
      # sharing issues.
      # TODO(rohanj): Use context.shared_name() instead.
      self._name += str(ops.uid())

    super(KeyValueTensorInitializer, self).__init__(self._keys.dtype,
                                                    self._values.dtype)

  def initialize(self, table):
    """Initializes the given `table` with `keys` and `values` tensors.

    Args:
      table: The table to initialize.

    Returns:
      The operation that initializes the table.

    Raises:
      TypeError: when the keys and values data types do not match the table
        key and value data types.
    """
    _check_table_dtypes(table, self._keys.dtype, self._values.dtype)
    with ops.name_scope(
        self._name, values=(table.resource_handle, self._keys, self._values)):
      if fwd_compat.forward_compatible(2018, 9, 19):
        init_op = gen_lookup_ops.lookup_table_import_v2(table.resource_handle,
                                                        self._keys,
                                                        self._values)
      else:
        # To maintain forward compatibility, use the old implementation.
        init_op = gen_lookup_ops.initialize_table_v2(table.resource_handle,
                                                     self._keys, self._values)
    # Registering in TABLE_INITIALIZERS makes tf.tables_initializer() run it.
    ops.add_to_collection(ops.GraphKeys.TABLE_INITIALIZERS, init_op)
    return init_op
@tf_export("lookup.TextFileIndex")
class TextFileIndex(object):
  """The key and value content to get from each line.

  This class defines the key and value used for tf.lookup.TextFileInitializer.

  The key and value content to get from each line is specified either
  by the following, or a value `>=0`.
  * `TextFileIndex.LINE_NUMBER` means use the line number starting from zero,
    expects data type int64.
  * `TextFileIndex.WHOLE_LINE` means use the whole line content, expects data
    type string.

  A value `>=0` means use the index (starting at zero) of the split line based
  on `delimiter`.
  """
  # Sentinel values; any non-negative integer instead selects a delimited
  # column index.
  WHOLE_LINE = -2
  LINE_NUMBER = -1
@tf_export("lookup.TextFileInitializer")
class TextFileInitializer(TableInitializerBase):
  """Table initializers from a text file.

  This initializer assigns one entry in the table for each line in the file.

  The key and value type of the table to initialize is given by `key_dtype` and
  `value_dtype`.

  The key and value content to get from each line is specified by
  the `key_index` and `value_index`.

  * `TextFileIndex.LINE_NUMBER` means use the line number starting from zero,
    expects data type int64.
  * `TextFileIndex.WHOLE_LINE` means use the whole line content, expects data
    type string.
  * A value `>=0` means use the index (starting at zero) of the split line based
    on `delimiter`.

  For example if we have a file with the following content:

  ```
  emerson 10
  lake 20
  palmer 30
  ```

  The following snippet initializes a table with the first column as keys and
  second column as values:

  * `emerson -> 10`
  * `lake -> 20`
  * `palmer -> 30`

  ```python
  table = tf.lookup.StaticHashTable(tf.lookup.TextFileInitializer(
      "test.txt", tf.string, 0, tf.int64, 1, delimiter=" "), -1)
  ...
  table.init.run()
  ```

  Similarly to initialize the whole line as keys and the line number as values.

  * `emerson 10 -> 0`
  * `lake 20 -> 1`
  * `palmer 30 -> 2`

  ```python
  table = tf.lookup.StaticHashTable(tf.lookup.TextFileInitializer(
      "test.txt", tf.string, tf.lookup.TextFileIndex.WHOLE_LINE,
      tf.int64, tf.lookup.TextFileIndex.LINE_NUMBER, delimiter=" "), -1)
  ...
  table.init.run()
  ```
  """

  def __init__(self,
               filename,
               key_dtype,
               key_index,
               value_dtype,
               value_index,
               vocab_size=None,
               delimiter="\t",
               name=None):
    """Constructs a table initializer object to populate from a text file.

    It generates one key-value pair per line. The type of table key and
    value are specified by `key_dtype` and `value_dtype`, respectively.
    Similarly the content of the key and value are specified by the key_index
    and value_index.

    - TextFileIndex.LINE_NUMBER means use the line number starting from zero,
      expects data type int64.
    - TextFileIndex.WHOLE_LINE means use the whole line content, expects data
      type string.
    - A value >=0 means use the index (starting at zero) of the split line based
      on `delimiter`.

    Args:
      filename: The filename of the text file to be used for initialization. The
        path must be accessible from wherever the graph is initialized (eg.
        trainer or eval workers). The filename may be a scalar `Tensor`.
      key_dtype: The `key` data type.
      key_index: the index that represents information of a line to get the
        table 'key' values from.
      value_dtype: The `value` data type.
      value_index: the index that represents information of a line to get the
        table 'value' values from.
      vocab_size: The number of elements in the file, if known.
      delimiter: The delimiter to separate fields in a line.
      name: A name for the operation (optional).

    Raises:
      ValueError: when the filename is empty, or when the table key and value
        data types do not match the expected data types.
    """
    if not isinstance(filename, ops.Tensor) and not filename:
      raise ValueError("Filename required for %s." % name)

    self._filename_arg = filename
    key_dtype = dtypes.as_dtype(key_dtype)
    value_dtype = dtypes.as_dtype(value_dtype)

    # Indices below -2 are meaningless; -2/-1 are the TextFileIndex sentinels.
    if key_index < -2:
      raise ValueError("Invalid key index %s." % (key_index))

    if key_index == TextFileIndex.LINE_NUMBER and key_dtype != dtypes.int64:
      raise ValueError("Signature mismatch. Keys must be dtype %s, got %s." %
                       (dtypes.int64, key_dtype))
    # WHOLE_LINE keys may be string, or integer (parsed from the line text).
    if ((key_index == TextFileIndex.WHOLE_LINE) and
        (not key_dtype.is_integer) and (key_dtype != dtypes.string)):
      raise ValueError(
          "Signature mismatch. Keys must be integer or string, got %s." %
          key_dtype)
    if value_index < -2:
      raise ValueError("Invalid value index %s." % (value_index))

    if value_index == TextFileIndex.LINE_NUMBER and value_dtype != dtypes.int64:
      raise ValueError("Signature mismatch. Values must be dtype %s, got %s." %
                       (dtypes.int64, value_dtype))
    if value_index == TextFileIndex.WHOLE_LINE and value_dtype != dtypes.string:
      raise ValueError("Signature mismatch. Values must be dtype %s, got %s." %
                       (dtypes.string, value_dtype))

    if (vocab_size is not None) and (vocab_size <= 0):
      raise ValueError("Invalid vocab_size %s." % vocab_size)

    self._key_index = key_index
    self._value_index = value_index
    self._vocab_size = vocab_size
    self._delimiter = delimiter
    self._name = name
    # Register the vocabulary file as a trackable asset (presumably so it is
    # bundled with SavedModels/checkpoints that use this initializer --
    # confirm against the trackable module).
    self._filename = self._track_trackable(
        trackable.TrackableAsset(filename), "_filename")

    super(TextFileInitializer, self).__init__(key_dtype, value_dtype)

  def initialize(self, table):
    """Initializes the table from a text file.

    Args:
      table: The table to be initialized.

    Returns:
      The operation that initializes the table.

    Raises:
      TypeError: when the keys and values data types do not match the table
        key and value data types.
    """
    _check_table_dtypes(table, self.key_dtype, self.value_dtype)
    with ops.name_scope(self._name, "text_file_init", (table.resource_handle,)):
      filename = ops.convert_to_tensor(
          self._filename, dtypes.string, name="asset_filepath")
      # -1 tells the kernel the vocab size is unknown.
      init_op = gen_lookup_ops.initialize_table_from_text_file_v2(
          table.resource_handle, filename, self._key_index, self._value_index,
          -1 if self._vocab_size is None else self._vocab_size, self._delimiter)
    ops.add_to_collection(ops.GraphKeys.TABLE_INITIALIZERS, init_op)
    # If the filename tensor is anything other than a string constant (e.g.,
    # if it is a placeholder) then it does not make sense to track it as an
    # asset.
    if not context.executing_eagerly() and constant_op.is_constant(filename):
      ops.add_to_collection(ops.GraphKeys.ASSET_FILEPATHS, filename)
    return init_op

  @property
  def _shared_name(self):
    """Deterministic shared name so identical file-backed tables are shared."""
    if self._vocab_size:
      # Keep the shared_name:
      # <table_type>_<filename>_<vocab_size>_<key_index>_<value_index>
      shared_name = "hash_table_%s_%d_%s_%s" % (
          self._filename_arg, self._vocab_size, self._key_index,
          self._value_index)
    else:
      # Keep the shared_name
      # <table_type>_<filename>_<key_index>_<value_index>
      shared_name = "hash_table_%s_%s_%s" % (self._filename_arg,
                                             self._key_index, self._value_index)
    return shared_name
class TextFileStringTableInitializer(TextFileInitializer):
  """Table initializer for `int64` IDs to string tables from a text file."""

  def __init__(self,
               filename,
               key_column_index=TextFileIndex.LINE_NUMBER,
               value_column_index=TextFileIndex.WHOLE_LINE,
               vocab_size=None,
               delimiter="\t",
               name="text_file_string_table_init"):
    """Builds an id-to-string initializer backed by a text file.

    This is a thin convenience wrapper around `TextFileInitializer` that
    fixes the key dtype to int64 and the value dtype to string, producing
    one key-value pair per line of the file. Which part of each line is
    used is controlled by `key_column_index` and `value_column_index`:

    - TextFileIndex.LINE_NUMBER means use the line number starting from zero,
      expects data type int64.
    - TextFileIndex.WHOLE_LINE means use the whole line content, expects data
      type string.
    - A value >=0 means use the index (starting at zero) of the split line based
      on `delimiter`.

    Args:
      filename: The filename of the text file to be used for initialization. The
        path must be accessible from wherever the graph is initialized (eg.
        trainer or eval workers). The filename may be a scalar `Tensor`.
      key_column_index: The column index from the text file to get the keys
        from. The default is to use the line number, starting from zero.
      value_column_index: The column index from the text file to get the values
        from. The default is to use the whole line content.
      vocab_size: The number of elements in the file, if known.
      delimiter: The delimiter to separate fields in a line.
      name: Optional name for the op.

    Raises:
      TypeError: when the filename is empty, or when the table key and value
        data types do not match the expected data types.
    """
    # Delegate with explicit keywords; only the dtypes are fixed here.
    super(TextFileStringTableInitializer, self).__init__(
        filename,
        key_dtype=dtypes.int64,
        key_index=key_column_index,
        value_dtype=dtypes.string,
        value_index=value_column_index,
        vocab_size=vocab_size,
        delimiter=delimiter,
        name=name)
class TextFileIdTableInitializer(TextFileInitializer):
  """Table initializer for string to `int64` IDs tables from a text file."""

  def __init__(self,
               filename,
               key_column_index=TextFileIndex.WHOLE_LINE,
               value_column_index=TextFileIndex.LINE_NUMBER,
               vocab_size=None,
               delimiter="\t",
               name="text_file_id_table_init",
               key_dtype=dtypes.string):
    """Builds a string-to-id initializer backed by a text file.

    This is a thin convenience wrapper around `TextFileInitializer` that
    fixes the value dtype to int64, producing one key-value pair per line
    of the file. Which part of each line is used is controlled by
    `key_column_index` and `value_column_index`:

    - TextFileIndex.LINE_NUMBER means use the line number starting from zero,
      expects data type int64.
    - TextFileIndex.WHOLE_LINE means use the whole line content, expects data
      type string.
    - A value >=0 means use the index (starting at zero) of the split line based
      on `delimiter`.

    Args:
      filename: The filename of the text file to be used for initialization. The
        path must be accessible from wherever the graph is initialized (eg.
        trainer or eval workers). The filename may be a scalar `Tensor`.
      key_column_index: The column index from the text file to get the `key`
        values from. The default is to use the whole line content.
      value_column_index: The column index from the text file to get the `value`
        values from. The default is to use the line number, starting from zero.
      vocab_size: The number of elements in the file, if known.
      delimiter: The delimiter to separate fields in a line.
      name: Optional name for the op.
      key_dtype: The `key` data type.

    Raises:
      TypeError: when the filename is empty, or when the table key and value
        data types do not match the expected data types.
    """
    # Delegate with explicit keywords; only the value dtype is fixed here.
    super(TextFileIdTableInitializer, self).__init__(
        filename,
        key_dtype=key_dtype,
        key_index=key_column_index,
        value_dtype=dtypes.int64,
        value_index=value_column_index,
        vocab_size=vocab_size,
        delimiter=delimiter,
        name=name)
class HasherSpec(collections.namedtuple("HasherSpec", ["hasher", "key"])):
  """A structure for the spec of the hashing function to use for hash buckets.

  `hasher` is the name of the hashing function to use (eg. "fasthash",
  "stronghash").
  `key` is optional and specify the key to use for the hash function if
  supported, currently only used by a strong hash.

  Fields:
    hasher: The hasher name to use.
    key: The key to be used by the hashing function, if required.
  """
  # Empty __slots__ keeps instances as lightweight as the plain namedtuple
  # (no per-instance __dict__).
  __slots__ = ()
# Default hasher spec: fast, non-keyed hashing.
FastHashSpec = HasherSpec("fasthash", None)  # pylint: disable=invalid-name
class StrongHashSpec(HasherSpec):
  """A structure to specify a key of the strong keyed hash spec.

  The strong hash requires a `key`, which is a list of 2 unsigned integer
  numbers. These should be non-zero; random numbers generated from random.org
  would be a fine choice.

  Fields:
    key: The key to be used by the keyed hashing function.
  """
  __slots__ = ()

  def __new__(cls, key):
    """Validates `key` and builds a ("stronghash", key) spec.

    Raises:
      ValueError: if `key` does not have exactly two elements.
      TypeError: if either element of `key` is not an integer.
    """
    if len(key) != 2:
      raise ValueError("key must have size 2, got %s." % len(key))

    if not isinstance(key[0], compat.integral_types) or not isinstance(
        key[1], compat.integral_types):
      raise TypeError("Invalid key %s. Must be unsigned integer values." % key)

    # Bug fix: the super() arguments were reversed (`super(cls,
    # StrongHashSpec)`), which raises TypeError for any subclass of
    # StrongHashSpec because issubclass(StrongHashSpec, cls) fails then.
    # `super(StrongHashSpec, cls)` is the correct form and is identical in
    # behavior for direct instantiation.
    return super(StrongHashSpec, cls).__new__(cls, "stronghash", key)
def _as_string(tensor):
  """Returns `tensor` unchanged if it is a string tensor, else casts it.

  Non-string tensors are converted element-wise via `string_ops.as_string`.
  """
  is_string = tensor.dtype.base_dtype == dtypes.string
  return tensor if is_string else string_ops.as_string(tensor)
class IdTableWithHashBuckets(LookupInterface):
  """String to Id table wrapper that assigns out-of-vocabulary keys to buckets.

  For example, if an instance of `IdTableWithHashBuckets` is initialized with a
  string-to-id table that maps:

  * `emerson -> 0`
  * `lake -> 1`
  * `palmer -> 2`

  The `IdTableWithHashBuckets` object will perform the following mapping:

  * `emerson -> 0`
  * `lake -> 1`
  * `palmer -> 2`
  * `<other term> -> bucket_id`, where bucket_id will be between `3` and
    `3 + num_oov_buckets - 1`, calculated by:
    `hash(<term>) % num_oov_buckets + vocab_size`

  If input_tensor is `["emerson", "lake", "palmer", "king", "crimson"]`,
  the lookup result is `[0, 1, 2, 4, 7]`.

  If `table` is None, only out-of-vocabulary buckets are used.

  Example usage:

  ```python
  num_oov_buckets = 3
  input_tensor = tf.constant(["emerson", "lake", "palmer", "king", "crimson"])
  table = tf.IdTableWithHashBuckets(
      tf.StaticHashTable(tf.TextFileIdTableInitializer(filename),
                         default_value),
      num_oov_buckets)
  out = table.lookup(input_tensor).
  table.init.run()
  print(out.eval())
  ```

  The hash function used for generating out-of-vocabulary buckets ID is handled
  by `hasher_spec`.
  """

  def __init__(self,
               table,
               num_oov_buckets,
               hasher_spec=FastHashSpec,
               name=None,
               key_dtype=None):
    """Construct a `IdTableWithHashBuckets` object.

    Args:
      table: Table that maps `tf.string` or `tf.int64` keys to `tf.int64` ids.
      num_oov_buckets: Number of buckets to use for out-of-vocabulary keys.
      hasher_spec: A `HasherSpec` to specify the hash function to use for
        assignation of out-of-vocabulary buckets  (optional).
      name: A name for the operation (optional).
      key_dtype: Data type of keys passed to `lookup`. Defaults to
        `table.key_dtype` if `table` is specified, otherwise `tf.string`. Must
        be string or integer, and must be castable to `table.key_dtype`.

    Raises:
      ValueError: when `table` in None and `num_oov_buckets` is not positive.
      TypeError: when `hasher_spec` is invalid.
    """
    # If a name ends with a '/' it is a "name scope", remove all trailing '/'
    # characters to use as table name.
    if name:
      name = name.rstrip("/")
    if table:
      if key_dtype is None:
        key_dtype = table.key_dtype
      supported_table_key_dtypes = (dtypes.int64, dtypes.string)
      if table.key_dtype not in supported_table_key_dtypes:
        raise TypeError("Invalid key dtype, expected one of %s, but got %s." %
                        (supported_table_key_dtypes, key_dtype))
      # Integer-vs-string mismatch between lookup keys and table keys is not
      # castable, so reject it early.
      if table.key_dtype.is_integer != key_dtype.is_integer:
        raise TypeError("Invalid key dtype, expected %s but got %s." %
                        ("integer" if key_dtype.is_integer else "non-integer",
                         table.key_dtype))
      if table.value_dtype != dtypes.int64:
        raise TypeError("Invalid value dtype, expected %s but got %s." %
                        (dtypes.int64, table.value_dtype))
      self._table = table
      name = name or self._table.name
    else:
      if num_oov_buckets <= 0:
        raise ValueError("oov_buckets must be > 0 if no table is supplied.")
      key_dtype = dtypes.string if key_dtype is None else key_dtype
      self._table = None
      name = name or "hash_bucket"
    if (not key_dtype.is_integer) and (dtypes.string != key_dtype):
      raise TypeError("Invalid key_dtype, expected integer or string, got %s." %
                      key_dtype)
    self._num_oov_buckets = num_oov_buckets

    if not isinstance(hasher_spec, HasherSpec):
      raise TypeError("hasher_spec must be of type HasherSpec, got %s" %
                      hasher_spec)
    self._hasher_spec = hasher_spec
    if name:
      self._table_name = name.split("/")[-1]
    else:
      self._table_name = None
    super(IdTableWithHashBuckets, self).__init__(key_dtype, dtypes.int64)

  def _create_resource(self):
    # Delegate to the wrapped table; a bucket-only instance has no resource.
    if self._table is not None:
      return self._table._create_resource()  # pylint: disable=protected-access
    return None

  def _initialize(self):
    # Delegate to the wrapped table; bucket-only instances need no init.
    if self._table is not None:
      return self._table._initialize()  # pylint: disable=protected-access
    with ops.name_scope(None, "init"):
      return control_flow_ops.no_op()

  @property
  def initializer(self):
    """The wrapped table's init op, or a no-op when bucket-only."""
    if self._table is not None:
      return self._table._init_op  # pylint: disable=protected-access
    with ops.name_scope(None, "init"):
      return control_flow_ops.no_op()

  @property
  @deprecated("2018-12-15", "Use `initializer` instead.")
  def init(self):
    return self.initializer

  @property
  def resource_handle(self):
    if self._table is not None:
      return self._table.resource_handle
    return None

  @property
  def name(self):
    return self._table_name

  def size(self, name=None):
    """Compute the number of elements in this table."""
    with ops.name_scope(name, "%s_Size" % self.name):
      if self._table:
        tsize = self._table.size()
      else:
        tsize = ops.convert_to_tensor(0, dtype=dtypes.int64)
      # OOV buckets always extend the id space beyond the vocab size.
      return tsize + self._num_oov_buckets

  def _get_string_to_hash_bucket_fn(self, hasher_spec):
    """Returns the string_to_hash_bucket op to use based on `hasher_spec`."""
    if not isinstance(hasher_spec, HasherSpec):
      raise TypeError("hasher_spec must be of type HasherSpec %s" % hasher_spec)
    if hasher_spec.hasher == "fasthash":
      return string_ops.string_to_hash_bucket_fast
    if hasher_spec.hasher == "legacy":  # The legacy hasher is not public.
      return string_ops.string_to_hash_bucket
    if hasher_spec.hasher == "stronghash":
      return functools.partial(
          string_ops.string_to_hash_bucket_strong, key=hasher_spec.key)
    raise ValueError("Unknown hasher %s" % hasher_spec.hasher)

  def lookup(self, keys, name=None):
    """Looks up `keys` in the table, outputs the corresponding values.

    It assigns out-of-vocabulary keys to buckets based in their hashes.

    Args:
      keys: Keys to look up. May be either a `SparseTensor` or dense `Tensor`.
      name: Optional name for the op.

    Returns:
      A `SparseTensor` if keys are sparse, otherwise a dense `Tensor`.

    Raises:
      TypeError: when `keys` doesn't match the table key data type.
    """
    if keys.dtype.base_dtype != self._key_dtype:
      raise TypeError("Signature mismatch. Keys must be dtype %s, got %s." %
                      (self._key_dtype, keys.dtype))
    values = keys
    if isinstance(keys, sparse_tensor.SparseTensor):
      # Operate on the flat values; the sparse structure is reattached below.
      values = keys.values
    if self._table and (self._table.key_dtype.base_dtype == dtypes.int64):
      values = math_ops.cast(values, dtypes.int64)

    if self._num_oov_buckets == 0:
      ids = self._table.lookup(values, name=name)
    else:
      # TODO(yleon): Consider moving this functionality to its own kernel.
      with ops.name_scope(name, "%s_Lookup" % self.name):
        str_to_hash_bucket = self._get_string_to_hash_bucket_fn(
            self._hasher_spec)
        buckets = str_to_hash_bucket(
            _as_string(values),
            num_buckets=self._num_oov_buckets,
            name="hash_bucket")
        if self._table:
          ids = self._table.lookup(values)
          # Shift bucket ids past the vocabulary, then use them only where
          # the table returned its default (i.e. out-of-vocabulary) value.
          buckets = math_ops.add(buckets, self._table.size())
          is_id_non_default = math_ops.not_equal(ids, self._table.default_value)
          ids = array_ops.where_v2(is_id_non_default, ids, buckets)
        else:
          ids = buckets
    if isinstance(keys, sparse_tensor.SparseTensor):
      return sparse_tensor.SparseTensor(keys.indices, ids, keys.dense_shape)
    return ids
@tf_export("lookup.StaticVocabularyTable", v1=[])
class StaticVocabularyTable(LookupInterface):
  """String to Id table wrapper that assigns out-of-vocabulary keys to buckets.

  For example, if an instance of `StaticVocabularyTable` is initialized with a
  string-to-id initializer that maps:

  * `emerson -> 0`
  * `lake -> 1`
  * `palmer -> 2`

  The `Vocabulary` object will perform the following mapping:

  * `emerson -> 0`
  * `lake -> 1`
  * `palmer -> 2`
  * `<other term> -> bucket_id`, where bucket_id will be between `3` and
    `3 + num_oov_buckets - 1`, calculated by:
    `hash(<term>) % num_oov_buckets + vocab_size`

  If input_tensor is `["emerson", "lake", "palmer", "king", "crimson"]`,
  the lookup result is `[0, 1, 2, 4, 7]`.

  If `initializer` is None, only out-of-vocabulary buckets are used.

  Example usage:

  ```python
  num_oov_buckets = 3
  input_tensor = tf.constant(["emerson", "lake", "palmer", "king", "crimson"])
  table = tf.lookup.StaticVocabularyTable(
      tf.TextFileIdTableInitializer(filename), num_oov_buckets)
  out = table.lookup(input_tensor).
  table.init.run()
  print(out.eval())
  ```

  The hash function used for generating out-of-vocabulary buckets ID is
  Fingerprint64.
  """

  def __init__(self,
               initializer,
               num_oov_buckets,
               lookup_key_dtype=None,
               name=None):
    """Construct a `StaticVocabularyTable` object.

    Args:
      initializer: A TableInitializerBase object that contains the data used to
        initialize the table. If None, then we only use out-of-vocab buckets.
      num_oov_buckets: Number of buckets to use for out-of-vocabulary keys. Must
        be greater than zero.
      lookup_key_dtype: Data type of keys passed to `lookup`. Defaults to
        `initializer.key_dtype` if `initializer` is specified, otherwise
        `tf.string`. Must be string or integer, and must be castable to
        `initializer.key_dtype`.
      name: A name for the operation (optional).

    Raises:
      ValueError: when `num_oov_buckets` is not positive.
      TypeError: when lookup_key_dtype or initializer.key_dtype are not
        integer or string.  Also when initializer.value_dtype != int64.
    """
    if num_oov_buckets <= 0:
      raise ValueError("oov_buckets must be > 0.")
    # If a name ends with a '/' it is a "name scope", remove all trailing '/'
    # characters to use as table name.
    if name:
      name = name.rstrip("/")
    if initializer:
      if lookup_key_dtype is None:
        lookup_key_dtype = initializer.key_dtype
      supported_table_key_dtypes = (dtypes.int64, dtypes.string)
      if initializer.key_dtype not in supported_table_key_dtypes:
        raise TypeError("Invalid key dtype, expected one of %s, but got %s." %
                        (supported_table_key_dtypes, initializer.key_dtype))
      # Integer-vs-string mismatch between lookup keys and table keys is not
      # castable, so reject it early.
      if initializer.key_dtype.is_integer != lookup_key_dtype.is_integer:
        raise TypeError(
            "Invalid key dtype, expected %s but got %s." %
            ("integer" if lookup_key_dtype.is_integer else "non-integer",
             initializer.key_dtype))
      if initializer.value_dtype != dtypes.int64:
        raise TypeError("Invalid value dtype, expected %s but got %s." %
                        (dtypes.int64, initializer.value_dtype))
      # default_value=-1 marks out-of-vocabulary lookups in the inner table.
      self._table = HashTable(initializer, default_value=-1)
      name = name or self._table.name
    else:
      lookup_key_dtype = dtypes.string
      self._table = None
      name = name or "hash_bucket"
    if (not lookup_key_dtype.is_integer) and (dtypes.string !=
                                              lookup_key_dtype):
      raise TypeError("Invalid key_dtype, expected integer or string, got %s." %
                      lookup_key_dtype)
    self._num_oov_buckets = num_oov_buckets

    self._table_name = None
    if name is not None:
      self._table_name = name.split("/")[-1]
    super(StaticVocabularyTable, self).__init__(lookup_key_dtype, dtypes.int64)

  def _create_resource(self):
    # Delegate to the wrapped table; a bucket-only instance has no resource.
    if self._table is not None:
      return self._table._create_resource()  # pylint: disable=protected-access
    return None

  def _initialize(self):
    # Delegate to the wrapped table; bucket-only instances need no init.
    if self._table is not None:
      return self._table._initialize()  # pylint: disable=protected-access
    with ops.name_scope(None, "init"):
      return control_flow_ops.no_op()

  @property
  def resource_handle(self):
    if self._table is not None:
      return self._table.resource_handle
    return None

  @property
  def name(self):
    return self._table_name

  def size(self, name=None):
    """Compute the number of elements in this table."""
    with ops.name_scope(name, "%s_Size" % self.name):
      if self._table:
        tsize = self._table.size()
      else:
        tsize = ops.convert_to_tensor(0, dtype=dtypes.int64)
      # OOV buckets always extend the id space beyond the vocab size.
      return tsize + self._num_oov_buckets

  def lookup(self, keys, name=None):
    """Looks up `keys` in the table, outputs the corresponding values.

    It assigns out-of-vocabulary keys to buckets based in their hashes.

    Args:
      keys: Keys to look up. May be either a `SparseTensor` or dense `Tensor`.
      name: Optional name for the op.

    Returns:
      A `SparseTensor` if keys are sparse, otherwise a dense `Tensor`.

    Raises:
      TypeError: when `keys` doesn't match the table key data type.
    """
    if keys.dtype.base_dtype != self._key_dtype:
      raise TypeError("Signature mismatch. Keys must be dtype %s, got %s." %
                      (self._key_dtype, keys.dtype))
    values = keys
    if isinstance(keys, sparse_tensor.SparseTensor):
      # Operate on the flat values; the sparse structure is reattached below.
      values = keys.values
    if self._table and (self._table.key_dtype.base_dtype == dtypes.int64):
      values = math_ops.cast(values, dtypes.int64)

    # TODO(yleon): Consider moving this functionality to its own kernel.
    with ops.name_scope(name, "%s_Lookup" % self.name):
      buckets = string_ops.string_to_hash_bucket_fast(
          _as_string(values),
          num_buckets=self._num_oov_buckets,
          name="hash_bucket")
      if self._table:
        ids = self._table.lookup(values)
        # Shift bucket ids past the vocabulary, then use them only where
        # the table returned its default (i.e. out-of-vocabulary) value.
        buckets = math_ops.add(buckets, self._table.size())
        is_id_non_default = math_ops.not_equal(ids, self._table.default_value)
        ids = array_ops.where_v2(is_id_non_default, ids, buckets)
      else:
        ids = buckets
    if isinstance(keys, sparse_tensor.SparseTensor):
      return sparse_tensor.SparseTensor(keys.indices, ids, keys.dense_shape)
    return ids
@tf_export(v1=["lookup.StaticVocabularyTable"])
class StaticVocabularyTableV1(StaticVocabularyTable):
  """V1 variant of `StaticVocabularyTable` exposing an `initializer` op."""

  @property
  def initializer(self):
    """The inner table's init op, or a no-op when bucket-only."""
    if self._table is not None:
      return self._table._init_op  # pylint: disable=protected-access
    with ops.name_scope(None, "init"):
      return control_flow_ops.no_op()
def index_table_from_file(vocabulary_file=None,
                          num_oov_buckets=0,
                          vocab_size=None,
                          default_value=-1,
                          hasher_spec=FastHashSpec,
                          key_dtype=dtypes.string,
                          name=None,
                          key_column_index=TextFileIndex.WHOLE_LINE,
                          value_column_index=TextFileIndex.LINE_NUMBER,
                          delimiter="\t"):
  """Returns a lookup table that converts a string tensor into int64 IDs.

  This operation constructs a lookup table to convert tensor of strings into
  int64 IDs. The mapping can be initialized from a vocabulary file specified in
  `vocabulary_file`, where the whole line is the key and the zero-based line
  number is the ID.

  Any lookup of an out-of-vocabulary token will return a bucket ID based on its
  hash if `num_oov_buckets` is greater than zero. Otherwise it is assigned the
  `default_value`.
  The bucket ID range is
  `[vocabulary size, vocabulary size + num_oov_buckets - 1]`.

  The underlying table must be initialized by calling
  `session.run(tf.compat.v1.tables_initializer())` or
  `session.run(table.init())` once.

  To specify multi-column vocabulary files, use key_column_index and
  value_column_index and delimiter.

  - TextFileIndex.LINE_NUMBER means use the line number starting from zero,
    expects data type int64.
  - TextFileIndex.WHOLE_LINE means use the whole line content, expects data
    type string.
  - A value >=0 means use the index (starting at zero) of the split line based
    on `delimiter`.

  Sample Usages:

  If we have a vocabulary file "test.txt" with the following content:

  ```
  emerson
  lake
  palmer
  ```

  ```python
  features = tf.constant(["emerson", "lake", "and", "palmer"])
  table = tf.lookup.index_table_from_file(
      vocabulary_file="test.txt", num_oov_buckets=1)
  ids = table.lookup(features)
  ...
  tf.compat.v1.tables_initializer().run()

  ids.eval()  ==> [0, 1, 3, 2]  # where 3 is the out-of-vocabulary bucket
  ```

  Args:
    vocabulary_file: The vocabulary filename, may be a constant scalar `Tensor`.
    num_oov_buckets: The number of out-of-vocabulary buckets.
    vocab_size: Number of the elements in the vocabulary, if known.
    default_value: The value to use for out-of-vocabulary feature values.
      Defaults to -1.
    hasher_spec: A `HasherSpec` to specify the hash function to use for
      assignation of out-of-vocabulary buckets.
    key_dtype: The `key` data type.
    name: A name for this op (optional).
    key_column_index: The column index from the text file to get the `key`
      values from. The default is to use the whole line content.
    value_column_index: The column index from the text file to get the `value`
      values from. The default is to use the line number, starting from zero.
    delimiter: The delimiter to separate fields in a line.

  Returns:
    The lookup table to map a `key_dtype` `Tensor` to index `int64` `Tensor`.

  Raises:
    ValueError: If `vocabulary_file` is not set.
    ValueError: If `num_oov_buckets` is negative or `vocab_size` is not greater
      than zero.
  """
  if vocabulary_file is None or (isinstance(vocabulary_file, six.string_types)
                                 and not vocabulary_file):
    raise ValueError("vocabulary_file must be specified and must not be empty.")
  if num_oov_buckets < 0:
    raise ValueError(
        "num_oov_buckets must be greater or equal than 0, got %d." %
        num_oov_buckets)
  if vocab_size is not None and vocab_size < 1:
    # Resolve a tensor-valued filename to a constant (or "?") purely to make
    # the error message more helpful.
    vocab_file_value = vocabulary_file
    if isinstance(vocabulary_file, ops.Tensor):
      vocab_file_value = tensor_util.constant_value(vocabulary_file) or "?"
    raise ValueError("vocab_size must be greater than 0, got %d. "
                     "vocabulary_file: %s" % (vocab_size, vocab_file_value))
  if (not key_dtype.is_integer) and (dtypes.string != key_dtype.base_dtype):
    raise TypeError("Only integer and string keys are supported.")

  with ops.name_scope(name, "string_to_index"):
    table = None
    with ops.name_scope(None, "hash_table"):
      init = TextFileIdTableInitializer(
          vocabulary_file,
          vocab_size=vocab_size,
          # Integer keys are stored as int64 in the underlying table.
          key_dtype=dtypes.int64 if key_dtype.is_integer else key_dtype,
          name="table_init",
          key_column_index=key_column_index,
          value_column_index=value_column_index,
          delimiter=delimiter)

      table = StaticHashTableV1(init, default_value)
    if num_oov_buckets:
      # Only wrap in the hashing layer when OOV buckets were requested.
      table = IdTableWithHashBuckets(
          table,
          num_oov_buckets=num_oov_buckets,
          hasher_spec=hasher_spec,
          key_dtype=key_dtype)

    return table
def index_table_from_tensor(vocabulary_list,
                            num_oov_buckets=0,
                            default_value=-1,
                            hasher_spec=FastHashSpec,
                            dtype=dtypes.string,
                            name=None):
  """Returns a lookup table that converts a string tensor into int64 IDs.

  The table maps each entry of the 1-D `vocabulary_list` tensor to its index
  within that tensor. When `num_oov_buckets > 0`, out-of-vocabulary keys are
  hashed into one of the buckets with IDs in
  `[vocabulary list size, vocabulary list size + num_oov_buckets - 1]`;
  otherwise they map to `default_value`.

  The underlying table must be initialized once by running
  `session.run(tf.compat.v1.tables_initializer())` or
  `session.run(table.init())`. `vocabulary_list` must not contain duplicate
  entries, otherwise running the initializer raises a
  `FailedPreconditionError`.

  Sample usage:

  ```python
  vocabulary_list = tf.constant(["emerson", "lake", "palmer"])
  table = tf.lookup.index_table_from_tensor(
      vocabulary_list=vocabulary_list, num_oov_buckets=1, default_value=-1)
  features = tf.constant(["emerson", "lake", "and", "palmer"])
  ids = table.lookup(features)
  ...
  tf.compat.v1.tables_initializer().run()
  ids.eval()  ==> [0, 1, 4, 2]
  ```

  Args:
    vocabulary_list: A 1-D `Tensor` holding the vocabulary; its type must be
      castable to `dtype`.
    num_oov_buckets: The number of out-of-vocabulary buckets.
    default_value: The value used for out-of-vocabulary keys. Defaults to -1.
    hasher_spec: A `HasherSpec` specifying the hash function used to assign
      out-of-vocabulary buckets.
    dtype: The type of values passed to `lookup`. Only string and integer
      types are supported.
    name: A name for this op (optional).

  Returns:
    The lookup table to map an input `Tensor` to index `int64` `Tensor`.

  Raises:
    ValueError: If `vocabulary_list` is invalid.
    ValueError: If `num_oov_buckets` is negative.
  """
  if vocabulary_list is None:
    raise ValueError("vocabulary_list must be specified.")
  if num_oov_buckets < 0:
    raise ValueError(
        "num_oov_buckets must be greater or equal than 0, got %d." %
        num_oov_buckets)
  if not (dtype.is_integer or dtype.base_dtype == dtypes.string):
    raise TypeError("Only integer and string keys are supported.")
  with ops.name_scope(name, "string_to_index"):
    vocab_keys = ops.convert_to_tensor(vocabulary_list)
    # The realized tensor must agree with `dtype` on integer-ness, and for
    # non-integer dtypes must match `dtype` exactly.
    if vocab_keys.dtype.is_integer != dtype.is_integer:
      raise ValueError(
          "Expected %s, got %s." %
          ("integer" if dtype.is_integer else "non-integer", vocab_keys.dtype))
    if (not dtype.is_integer) and (vocab_keys.dtype.base_dtype != dtype):
      raise ValueError("Expected %s, got %s." % (dtype, vocab_keys.dtype))
    # Each key's value is its position in the vocabulary tensor.
    vocab_indices = math_ops.cast(
        math_ops.range(array_ops.size(vocab_keys)), dtypes.int64)
    with ops.name_scope(None, "hash_table"):
      # The static table stores integer keys as int64 regardless of the
      # requested integer width.
      if vocab_keys.dtype.is_integer:
        hashable_keys = math_ops.cast(vocab_keys, dtypes.int64)
      else:
        hashable_keys = vocab_keys
      initializer = KeyValueTensorInitializer(
          hashable_keys,
          vocab_indices,
          hashable_keys.dtype.base_dtype,
          dtypes.int64,
          name="table_init")
      lookup_table = StaticHashTableV1(initializer, default_value)
    if num_oov_buckets:
      # Wrap the static table so OOV keys hash into the extra buckets.
      lookup_table = IdTableWithHashBuckets(
          lookup_table,
          num_oov_buckets=num_oov_buckets,
          hasher_spec=hasher_spec,
          key_dtype=dtype)
    return lookup_table
def index_to_string_table_from_file(vocabulary_file,
                                    vocab_size=None,
                                    default_value="UNK",
                                    name=None,
                                    key_column_index=TextFileIndex.LINE_NUMBER,
                                    value_column_index=TextFileIndex.WHOLE_LINE,
                                    delimiter="\t"):
  """Returns a lookup table that maps a `Tensor` of indices into strings.

  The table maps int64 indices to string values, initialized from
  `vocabulary_file`: by default each whole line is the value and its
  zero-based line number is the key. Indices without a corresponding entry
  (out-of-vocabulary) map to `default_value`.

  The underlying table must be initialized once by running
  `session.run(tf.compat.v1.tables_initializer())` or
  `session.run(table.init())`.

  Multi-column vocabulary files are supported through `key_column_index`,
  `value_column_index` and `delimiter`:

  - `TextFileIndex.LINE_NUMBER` means use the line number starting from zero,
    expects data type int64.
  - `TextFileIndex.WHOLE_LINE` means use the whole line content, expects data
    type string.
  - A value >= 0 means use the index (starting at zero) of the split line
    based on `delimiter`.

  Sample usage, given a vocabulary file "test.txt" containing:

  ```
  emerson
  lake
  palmer
  ```

  ```python
  indices = tf.constant([1, 5], tf.int64)
  table = tf.lookup.index_to_string_table_from_file(
      vocabulary_file="test.txt", default_value="UNKNOWN")
  values = table.lookup(indices)
  ...
  tf.compat.v1.tables_initializer().run()
  values.eval()  ==> ["lake", "UNKNOWN"]
  ```

  Args:
    vocabulary_file: The vocabulary filename, may be a constant scalar
      `Tensor`.
    vocab_size: Number of the elements in the vocabulary, if known.
    default_value: The value to use for out-of-vocabulary indices.
    name: A name for this op (optional).
    key_column_index: The column index from the text file to get the `key`
      values from. The default is to use the line number, starting from zero.
    value_column_index: The column index from the text file to get the `value`
      values from. The default is to use the whole line content.
    delimiter: The delimiter to separate fields in a line.

  Returns:
    The lookup table mapping `int64` indices to their string values.

  Raises:
    ValueError: when `vocabulary_file` is empty.
    ValueError: when `vocab_size` is invalid.
  """
  # Reject a missing argument as well as an empty path string; a non-empty
  # string or a string `Tensor` are both acceptable.
  file_is_missing = vocabulary_file is None
  if not file_is_missing and isinstance(vocabulary_file, six.string_types):
    file_is_missing = not vocabulary_file
  if file_is_missing:
    raise ValueError("vocabulary_file must be specified and must not be empty.")
  if vocab_size is not None:
    if vocab_size < 1:
      raise ValueError(
          "vocab_size must be greater than 0, got %d." % vocab_size)
  with ops.name_scope(name, "index_to_string"):
    initializer = TextFileStringTableInitializer(
        vocabulary_file,
        vocab_size=vocab_size,
        name="table_init",
        key_column_index=key_column_index,
        value_column_index=value_column_index,
        delimiter=delimiter)
    # TODO(yleon): Use a more efficient structure.
    return StaticHashTableV1(initializer, default_value)
def index_to_string_table_from_tensor(vocabulary_list,
                                      default_value="UNK",
                                      name=None):
  """Returns a lookup table that maps a `Tensor` of indices into strings.

  The table maps int64 indices to string values: each element of the 1-D
  `vocabulary_list` tensor is a value keyed by its position within the
  tensor. Indices without a corresponding entry (out-of-vocabulary) map to
  `default_value`.

  The underlying table must be initialized once by running
  `session.run(tf.compat.v1.tables_initializer())` or
  `session.run(table.init())`. `vocabulary_list` must not contain duplicate
  entries, otherwise running the initializer raises a
  `FailedPreconditionError`.

  Sample usage:

  ```python
  vocabulary_list = tf.constant(["emerson", "lake", "palmer"])
  indices = tf.constant([1, 5], tf.int64)
  table = tf.lookup.index_to_string_table_from_tensor(
      vocabulary_list, default_value="UNKNOWN")
  values = table.lookup(indices)
  ...
  tf.compat.v1.tables_initializer().run()
  values.eval()  ==> ["lake", "UNKNOWN"]
  ```

  Args:
    vocabulary_list: A 1-D string `Tensor` that specifies the strings to map
      from indices.
    default_value: The value to use for out-of-vocabulary indices.
    name: A name for this op (optional).

  Returns:
    The lookup table mapping `int64` indices to their string values.

  Raises:
    ValueError: when `vocabulary_list` is not set.
  """
  if vocabulary_list is None:
    raise ValueError("vocabulary_list must be specified.")
  with ops.name_scope(name, "index_to_string"):
    table_values = ops.convert_to_tensor(vocabulary_list, dtypes.string)
    # Keys are the positions 0..n-1 of the vocabulary entries.
    table_keys = math_ops.cast(
        math_ops.range(array_ops.size(table_values)), dtypes.int64)
    initializer = KeyValueTensorInitializer(
        table_keys, table_values, dtypes.int64, dtypes.string,
        name="table_init")
    # TODO(yleon): Use a more efficient structure.
    return StaticHashTableV1(initializer, default_value)
class MutableHashTable(LookupInterface):
  """A generic mutable hash table implementation.

  Data can be inserted by calling the insert method and removed by calling the
  remove method. It does not support initialization via the init method.

  Example usage:

  ```python
  table = tf.lookup.MutableHashTable(key_dtype=tf.string, value_dtype=tf.int64,
                                     default_value=-1)
  sess.run(table.insert(keys, values))
  out = table.lookup(query_keys)
  print(out.eval())
  ```
  """

  def __init__(self,
               key_dtype,
               value_dtype,
               default_value,
               name="MutableHashTable",
               checkpoint=True):
    """Creates an empty `MutableHashTable` object.

    Creates a table, the type of its keys and values are specified by key_dtype
    and value_dtype, respectively.

    Args:
      key_dtype: the type of the key tensors.
      value_dtype: the type of the value tensors.
      default_value: The value to use if a key is missing in the table.
      name: A name for the operation (optional).
      checkpoint: if True, the contents of the table are saved to and restored
        from checkpoints. If `shared_name` is empty for a checkpointed table, it
        is shared using the table node name.

    Returns:
      A `MutableHashTable` object.

    Raises:
      ValueError: If checkpoint is True and no name was specified.
    """
    # The default value's shape also fixes the shape of every stored value.
    self._default_value = ops.convert_to_tensor(
        default_value, dtype=value_dtype)
    self._value_shape = self._default_value.get_shape()
    self._checkpoint = checkpoint
    self._key_dtype = key_dtype
    self._value_dtype = value_dtype
    self._name = name
    # Stays None in graph mode so the kernel can share the resource by node
    # name when checkpointing (see _create_resource).
    self._shared_name = None
    if context.executing_eagerly():
      # TODO(allenl): This will leak memory due to kernel caching by the
      # shared_name attribute value (but is better than the alternative of
      # sharing everything by default when executing eagerly; hopefully creating
      # tables in a loop is uncommon).
      # TODO(rohanj): Use context.shared_name() instead.
      self._shared_name = "table_%d" % (ops.uid(),)
    super(MutableHashTable, self).__init__(key_dtype, value_dtype)
    self._resource_handle = self._create_resource()
    if checkpoint:
      # Graph-mode (Saver-based) checkpoint support; eager/object-based
      # checkpointing goes through _gather_saveables_for_checkpoint instead.
      saveable = MutableHashTable._Saveable(self, name)
      if not context.executing_eagerly():
        ops.add_to_collection(ops.GraphKeys.SAVEABLE_OBJECTS, saveable)

  def _create_resource(self):
    """Creates the underlying table resource and returns its handle."""
    # The table must be shared if checkpointing is requested for multi-worker
    # training to work correctly. Use the node name if no shared_name has been
    # explicitly specified.
    use_node_name_sharing = self._checkpoint and self._shared_name is None
    # Scalar values use the plain kernel; higher-rank values require the
    # "of_tensors" kernel, which carries the per-key value shape.
    if self._default_value.get_shape().ndims == 0:
      table_ref = gen_lookup_ops.mutable_hash_table_v2(
          shared_name=self._shared_name,
          use_node_name_sharing=use_node_name_sharing,
          key_dtype=self._key_dtype,
          value_dtype=self._value_dtype,
          name=self._name)
    else:
      table_ref = gen_lookup_ops.mutable_hash_table_of_tensors_v2(
          shared_name=self._shared_name,
          use_node_name_sharing=use_node_name_sharing,
          key_dtype=self._key_dtype,
          value_dtype=self._value_dtype,
          value_shape=self._default_value.get_shape(),
          name=self._name)
    # Eager mode has no graph op, hence no meaningful node name.
    if context.executing_eagerly():
      self._table_name = None
    else:
      self._table_name = table_ref.op.name.split("/")[-1]
    return table_ref

  @property
  def name(self):
    # None when executing eagerly (see _create_resource).
    return self._table_name

  def size(self, name=None):
    """Compute the number of elements in this table.

    Args:
      name: A name for the operation (optional).

    Returns:
      A scalar tensor containing the number of elements in this table.
    """
    with ops.name_scope(name, "%s_Size" % self.name, [self.resource_handle]):
      with ops.colocate_with(self.resource_handle):
        return gen_lookup_ops.lookup_table_size_v2(self.resource_handle)

  def remove(self, keys, name=None):
    """Removes `keys` and its associated values from the table.

    If a key is not present in the table, it is silently ignored.

    Args:
      keys: Keys to remove. Can be a tensor of any shape. Must match the table's
        key type.
      name: A name for the operation (optional).

    Returns:
      The created Operation.

    Raises:
      TypeError: when `keys` do not match the table data types.
    """
    if keys.dtype != self._key_dtype:
      raise TypeError("Signature mismatch. Keys must be dtype %s, got %s." %
                      (self._key_dtype, keys.dtype))
    with ops.name_scope(name, "%s_lookup_table_remove" % self.name,
                        (self.resource_handle, keys, self._default_value)):
      op = gen_lookup_ops.lookup_table_remove_v2(self.resource_handle, keys)
    return op

  def lookup(self, keys, name=None):
    """Looks up `keys` in a table, outputs the corresponding values.

    The `default_value` is used for keys not present in the table.

    Args:
      keys: Keys to look up. Can be a tensor of any shape. Must match the
        table's key_dtype.
      name: A name for the operation (optional).

    Returns:
      A tensor containing the values in the same shape as `keys` using the
      table's value type.

    Raises:
      TypeError: when `keys` do not match the table data types.
    """
    with ops.name_scope(name, "%s_lookup_table_find" % self.name,
                        (self.resource_handle, keys, self._default_value)):
      keys = ops.convert_to_tensor(keys, dtype=self._key_dtype, name="keys")
      with ops.colocate_with(self.resource_handle):
        values = gen_lookup_ops.lookup_table_find_v2(self.resource_handle, keys,
                                                     self._default_value)
    return values

  def insert(self, keys, values, name=None):
    """Associates `keys` with `values`.

    Args:
      keys: Keys to insert. Can be a tensor of any shape. Must match the table's
        key type.
      values: Values to be associated with keys. Must be a tensor of the same
        shape as `keys` and match the table's value type.
      name: A name for the operation (optional).

    Returns:
      The created Operation.

    Raises:
      TypeError: when `keys` or `values` doesn't match the table data
        types.
    """
    with ops.name_scope(name, "%s_lookup_table_insert" % self.name,
                        [self.resource_handle, keys, values]):
      keys = ops.convert_to_tensor(keys, self._key_dtype, name="keys")
      values = ops.convert_to_tensor(values, self._value_dtype, name="values")
      with ops.colocate_with(self.resource_handle):
        # pylint: disable=protected-access
        op = gen_lookup_ops.lookup_table_insert_v2(self.resource_handle, keys,
                                                   values)
    return op

  def export(self, name=None):
    """Returns tensors of all keys and values in the table.

    Args:
      name: A name for the operation (optional).

    Returns:
      A pair of tensors with the first tensor containing all keys and the
      second tensors containing all values in the table.
    """
    with ops.name_scope(name, "%s_lookup_table_export_values" % self.name,
                        [self.resource_handle]):
      with ops.colocate_with(self.resource_handle):
        exported_keys, exported_values = gen_lookup_ops.lookup_table_export_v2(
            self.resource_handle, self._key_dtype, self._value_dtype)
    return exported_keys, exported_values

  def _gather_saveables_for_checkpoint(self):
    """For object-based checkpointing."""
    # The partial defers _Saveable construction (and thus the export op) until
    # checkpoint time.
    return {
        "table":
            functools.partial(
                MutableHashTable._Saveable, table=self, name=self._name)
    }

  class _Saveable(BaseSaverBuilder.SaveableObject):
    """SaveableObject implementation for MutableHashTable."""

    def __init__(self, table, name):
      # Keys and values are checkpointed as two parallel tensors.
      tensors = table.export()
      specs = [
          BaseSaverBuilder.SaveSpec(tensors[0], "", name + "-keys"),
          BaseSaverBuilder.SaveSpec(tensors[1], "", name + "-values")
      ]
      # pylint: disable=protected-access
      super(MutableHashTable._Saveable, self).__init__(table, specs, name)

    def restore(self, restored_tensors, restored_shapes, name=None):
      del restored_shapes  # unused
      # pylint: disable=protected-access
      # self.op is the table object passed to SaveableObject above.
      with ops.name_scope(name, "%s_table_restore" % self.name):
        with ops.colocate_with(self.op.resource_handle):
          return gen_lookup_ops.lookup_table_import_v2(self.op.resource_handle,
                                                       restored_tensors[0],
                                                       restored_tensors[1])
@tf_export("lookup.experimental.DenseHashTable")
class DenseHashTable(LookupInterface):
  """A generic mutable hash table implementation using tensors as backing store.

  Data can be inserted by calling the insert method and removed by calling the
  remove method. It does not support initialization via the init method.

  It uses "open addressing" with quadratic reprobing to resolve collisions.
  Compared to `MutableHashTable` the insert, remove and lookup operations in a
  `DenseHashTable` are typically faster, but memory usage can be higher.
  However, `DenseHashTable` does not require additional memory for
  temporary tensors created during checkpointing and restore operations.

  Example usage:

  ```python
  table = tf.lookup.DenseHashTable(key_dtype=tf.int64,
                                   value_dtype=tf.int64,
                                   default_value=-1,
                                   empty_key=0,
                                   deleted_key=-1)
  sess.run(table.insert(keys, values))
  out = table.lookup(query_keys)
  print(out.eval())
  ```
  """

  # TODO(andreasst): consider extracting common code with MutableHashTable into
  # a common superclass.
  def __init__(self,
               key_dtype,
               value_dtype,
               default_value,
               empty_key,
               deleted_key,
               initial_num_buckets=None,
               name="MutableDenseHashTable",
               checkpoint=True):
    """Creates an empty `DenseHashTable` object.

    Creates a table, the type of its keys and values are specified by key_dtype
    and value_dtype, respectively.

    Args:
      key_dtype: the type of the key tensors.
      value_dtype: the type of the value tensors.
      default_value: The value to use if a key is missing in the table.
      empty_key: the key to use to represent empty buckets internally. Must not
        be used in insert, remove or lookup operations.
      deleted_key: the key to use to represent deleted buckets internally. Must
        not be used in insert, remove or lookup operations and be different from
        the empty_key.
      initial_num_buckets: the initial number of buckets.
      name: A name for the operation (optional).
      checkpoint: if True, the contents of the table are saved to and restored
        from checkpoints. If `shared_name` is empty for a checkpointed table, it
        is shared using the table node name.

    Returns:
      A `DenseHashTable` object.

    Raises:
      ValueError: If checkpoint is True and no name was specified.
    """
    # The default value's shape also fixes the shape of every stored value.
    self._default_value = ops.convert_to_tensor(
        default_value, dtype=value_dtype, name="default_value")
    self._key_dtype = key_dtype
    self._value_dtype = value_dtype
    self._initial_num_buckets = initial_num_buckets
    self._value_shape = self._default_value.get_shape()
    self._checkpoint = checkpoint
    self._name = name
    # Sentinel keys consumed by the open-addressing kernel; callers must never
    # insert/lookup/remove these (see docstring above).
    self._empty_key = ops.convert_to_tensor(
        empty_key, dtype=key_dtype, name="empty_key")
    self._deleted_key = ops.convert_to_tensor(
        deleted_key, dtype=key_dtype, name="deleted_key")
    # Stays None in graph mode so the kernel can share the resource by node
    # name when checkpointing (see _create_resource).
    self._shared_name = None
    if context.executing_eagerly():
      # TODO(allenl): This will leak memory due to kernel caching by the
      # shared_name attribute value (but is better than the alternative of
      # sharing everything by default when executing eagerly; hopefully creating
      # tables in a loop is uncommon).
      # TODO(rohanj): Use context.shared_name() instead.
      self._shared_name = "table_%d" % (ops.uid(),)
    super(DenseHashTable, self).__init__(key_dtype, value_dtype)
    self._resource_handle = self._create_resource()
    if checkpoint:
      # Graph-mode (Saver-based) checkpoint support; eager/object-based
      # checkpointing goes through _gather_saveables_for_checkpoint instead.
      saveable = DenseHashTable._Saveable(self, name)
      if not context.executing_eagerly():
        ops.add_to_collection(ops.GraphKeys.SAVEABLE_OBJECTS, saveable)

  def _create_resource(self):
    """Creates the underlying table resource and returns its handle."""
    # The table must be shared if checkpointing is requested for multi-worker
    # training to work correctly. Use the node name if no shared_name has been
    # explicitly specified.
    use_node_name_sharing = self._checkpoint and self._shared_name is None
    table_ref = gen_lookup_ops.mutable_dense_hash_table_v2(
        empty_key=self._empty_key,
        deleted_key=self._deleted_key,
        shared_name=self._shared_name,
        use_node_name_sharing=use_node_name_sharing,
        value_dtype=self._value_dtype,
        value_shape=self._value_shape,
        initial_num_buckets=self._initial_num_buckets,
        name=self._name)
    # Eager mode has no graph op, hence no meaningful node name.
    if context.executing_eagerly():
      self._table_name = None
    else:
      self._table_name = table_ref.op.name.split("/")[-1]
    return table_ref

  @property
  def name(self):
    # None when executing eagerly (see _create_resource).
    return self._table_name

  def size(self, name=None):
    """Compute the number of elements in this table.

    Args:
      name: A name for the operation (optional).

    Returns:
      A scalar tensor containing the number of elements in this table.
    """
    with ops.name_scope(name, "%s_Size" % self.name, [self.resource_handle]):
      with ops.colocate_with(self.resource_handle):
        return gen_lookup_ops.lookup_table_size_v2(self.resource_handle)

  def lookup(self, keys, name=None):
    """Looks up `keys` in a table, outputs the corresponding values.

    The `default_value` is used for keys not present in the table.

    Args:
      keys: Keys to look up. Can be a tensor of any shape. Must match the
        table's key_dtype.
      name: A name for the operation (optional).

    Returns:
      A tensor containing the values in the same shape as `keys` using the
      table's value type.

    Raises:
      TypeError: when `keys` do not match the table data types.
    """
    with ops.name_scope(name, "%s_lookup_table_find" % self.name,
                        [self.resource_handle, keys]):
      keys = ops.convert_to_tensor(keys, dtype=self._key_dtype, name="keys")
      with ops.colocate_with(self.resource_handle):
        values = gen_lookup_ops.lookup_table_find_v2(self.resource_handle, keys,
                                                     self._default_value)
    return values

  def insert_or_assign(self, keys, values, name=None):
    """Associates `keys` with `values`.

    Args:
      keys: Keys to insert. Can be a tensor of any shape. Must match the table's
        key type.
      values: Values to be associated with keys. Must be a tensor of the same
        shape as `keys` and match the table's value type.
      name: A name for the operation (optional).

    Returns:
      The created Operation.

    Raises:
      TypeError: when `keys` or `values` doesn't match the table data
        types.
    """
    with ops.name_scope(name, "%s_lookup_table_insert" % self.name,
                        [self.resource_handle, keys, values]):
      keys = ops.convert_to_tensor(keys, dtype=self._key_dtype, name="keys")
      values = ops.convert_to_tensor(
          values, dtype=self._value_dtype, name="values")
      with ops.colocate_with(self.resource_handle):
        op = gen_lookup_ops.lookup_table_insert_v2(self.resource_handle, keys,
                                                   values)
      return op

  def insert(self, keys, values, name=None):
    """Associates `keys` with `values`.

    Alias of `insert_or_assign` (kept for API parity with other tables).

    Args:
      keys: Keys to insert. Can be a tensor of any shape. Must match the table's
        key type.
      values: Values to be associated with keys. Must be a tensor of the same
        shape as `keys` and match the table's value type.
      name: A name for the operation (optional).

    Returns:
      The created Operation.

    Raises:
      TypeError: when `keys` or `values` doesn't match the table data
        types.
    """
    return self.insert_or_assign(keys, values, name)

  def erase(self, keys, name=None):
    """Removes `keys` and its associated values from the table.

    If a key is not present in the table, it is silently ignored.

    Args:
      keys: Keys to remove. Can be a tensor of any shape. Must match the table's
        key type.
      name: A name for the operation (optional).

    Returns:
      The created Operation.

    Raises:
      TypeError: when `keys` do not match the table data types.
    """
    if keys.dtype != self._key_dtype:
      raise TypeError("Signature mismatch. Keys must be dtype %s, got %s." %
                      (self._key_dtype, keys.dtype))
    with ops.name_scope(name, "%s_lookup_table_remove" % self.name,
                        (self.resource_handle, keys, self._default_value)):
      # pylint: disable=protected-access
      op = gen_lookup_ops.lookup_table_remove_v2(self.resource_handle, keys)
    return op

  def remove(self, keys, name=None):
    """Removes `keys` and its associated values from the table.

    Alias of `erase` (kept for API parity with other tables).

    If a key is not present in the table, it is silently ignored.

    Args:
      keys: Keys to remove. Can be a tensor of any shape. Must match the table's
        key type.
      name: A name for the operation (optional).

    Returns:
      The created Operation.

    Raises:
      TypeError: when `keys` do not match the table data types.
    """
    return self.erase(keys, name)

  def export(self, name=None):
    """Returns tensors of all keys and values in the table.

    Args:
      name: A name for the operation (optional).

    Returns:
      A pair of tensors with the first tensor containing all keys and the
      second tensors containing all values in the table.
    """
    with ops.name_scope(name, "%s_lookup_table_export_values" % self.name,
                        [self.resource_handle]):
      with ops.colocate_with(self.resource_handle):
        exported_keys, exported_values = gen_lookup_ops.lookup_table_export_v2(
            self.resource_handle, self._key_dtype, self._value_dtype)
    return exported_keys, exported_values

  def _gather_saveables_for_checkpoint(self):
    """For object-based checkpointing."""
    # The partial defers _Saveable construction (and thus the export op) until
    # checkpoint time.
    return {
        "table":
            functools.partial(
                DenseHashTable._Saveable, table=self, name=self._name)
    }

  class _Saveable(BaseSaverBuilder.SaveableObject):
    """SaveableObject implementation for DenseHashTable."""

    def __init__(self, table, name):
      # Keys and values are checkpointed as two parallel tensors.
      tensors = table.export()
      specs = [
          BaseSaverBuilder.SaveSpec(tensors[0], "", name + "-keys"),
          BaseSaverBuilder.SaveSpec(tensors[1], "", name + "-values")
      ]
      # pylint: disable=protected-access
      super(DenseHashTable._Saveable, self).__init__(table, specs, name)

    def restore(self, restored_tensors, restored_shapes, name=None):
      del restored_shapes  # unused
      # pylint: disable=protected-access
      # self.op is the table object passed to SaveableObject above.
      with ops.name_scope(name, "%s_table_restore" % self.name):
        with ops.colocate_with(self.op.resource_handle):
          return gen_lookup_ops.lookup_table_import_v2(self.op.resource_handle,
                                                       restored_tensors[0],
                                                       restored_tensors[1])
# Lookup-table ops have no meaningful gradients; register every kernel (V1 and
# V2 variants) as non-differentiable so gradient construction fails loudly
# instead of silently producing wrong gradients.
for _lookup_op_name in (
    "LookupTableFind",
    "LookupTableFindV2",
    "LookupTableInsert",
    "LookupTableInsertV2",
    "LookupTableSize",
    "LookupTableSizeV2",
    "HashTable",
    "HashTableV2",
    "InitializeTable",
    "InitializeTableV2",
    "InitializeTableFromTextFile",
    "InitializeTableFromTextFileV2",
    "MutableDenseHashTable",
    "MutableDenseHashTableV2",
    "MutableHashTable",
    "MutableHashTableV2",
    "MutableHashTableOfTensors",
    "MutableHashTableOfTensorsV2",
):
  ops.NotDifferentiable(_lookup_op_name)
|
PypiClean
|
/zegami-sdk-testrelease-0.4.6.tar.gz/zegami-sdk-testrelease-0.4.6/README.md
|
# Zegami Python SDK
An SDK and general wrapper for the lower level Zegami API for Python. This package provides higher level collection interaction and data retrieval.
# Getting started
Grab this repo, open the script, and load an instance of ZegamiClient into a variable.
```
from zegami_sdk.client import ZegamiClient
zc = ZegamiClient(username, password)
```
## Credentials
The client operates using a user token. By default, logging in once with a valid username/password will save the acquired token to your home directory as
`zegami.token`. The next time you need to use ZegamiClient, you may call `zc = ZegamiClient()` with no arguments, and it will look for this stored token.
## Example Usage
### Get the metadata and images associated with every dog of the 'beagle' breed in a collection of dogs:
```
zc = ZegamiClient()
```
### Workspaces
To see your available workspaces, use:
```
zc.show_workspaces()
```
You can then ask for a workspace by name, by ID, or just from a list
```
all_workspaces = zc.workspaces
first_workspace = all_workspaces[0]
```
or:
```
zc.show_workspaces()
# Note the ID of a workspace
my_workspace = zc.get_workspace_by_id(id)
```
### Collections
```
my_workspace.show_collections()
# Note the name of a collection
coll = my_workspace.get_collection_by_name(name_of_collection)
```
You can get the metadata in a collection as a Pandas DataFrame using:
```
rows = coll.rows
```
You can get the images of a collection using:
```
first_10_img_urls = coll.get_image_urls(list(range(10)))
imgs = coll.download_image_batch(first_10_img_urls)
```
If your collection supports the new multi-image-source functionality, you can see your available sources using:
```
coll.show_sources()
```
For source 2's (3rd in 0-indexed-list) images, you would use:
```
first_10_source3_img_urls = coll.get_image_urls(list(range(10)), source=2)
# To see the first of these:
coll.download_image(first_10_source3_img_urls[0])
```
### Using with onprem zegami
To use the client with an onprem installation of zegami you have to set the `home` keyword argument when instantiating `ZegamiClient`.
```
zegami_config = {
'username': <user>,
'password': <password>,
'home': <url of onprem zegami>,
'allow_save_token': True,
}
zc = ZegamiClient(**zegami_config)
```
If your onprem installation has self-signed certificates, you can disable SSL verification by setting the environment variable `ALLOW_INSECURE_SSL` before running your Python script.
```
export ALLOW_INSECURE_SSL=true
python myscript.py
```
or
```
ALLOW_INSECURE_SSL=true python myscript.py
```
WARNING! You should not need to set this when using the SDK for cloud zegami
# In Development
This SDK is in active development, not all features are available yet. Creating/uploading to collections is not supported currently - check back soon!
# Developer Conventions
Keeping the SDK easy and fluent to use externally and internally is crucial. If contributing PRs, some things to consider:
## Relevant
MOST IMPORTANT - Zegami has concepts used internally in its data engine, like 'imageset', 'dataset'. Strive to never require the user to have to know anything about these, or even see them. If the user needs an image, they should ask for an image from a concept they ARE expected to understand like a 'collection' or a 'workspace'. Anything obscure should be hidden, for example: `_get_imageset()`, so that auto-suggestions of a class will always contain relevant and useful methods/attribs/properties.
## Obvious
Avoid ambiguous parameters. Use the best worded, lowest level parameters types for functions/methods. Give them obvious names. Any ambiguity or unobvious parameters MUST be described in detail in the docstring. Avoid parameters like 'target' or 'action', or describe them explicitly. If an instance is needed, describe how/where that instance should come from.
## `assert`
If you expect an RGB image, check that your input is an array, that its len(shape) == 3, that shape[2] == 3. Use a proper message if this is not the case.
## Minimal
Do not ask for more information than is already obtainable. A source knows its parent collection, which knows how to get its own IDs and knows the client. A method never needs to reference a source, the owning collection, and the client all together. Moreover, these chains should have sensible assertions and checks built in, and potentially property/method-based shortcuts (with assertions).
## Helpful
Use sensible defaults wherever possible for minimal effort when using the SDK. V1 collections typically use `source=None`, while V2 collections use `source=0`. This allows a user with an old/new (single source) collection to never even have to know what a source is when fetching images.
|
PypiClean
|
/uniBert-0.2.1.tar.gz/uniBert-0.2.1/unibert/bert_qa/bert_qa.py
|
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from pytorch_transformers import *
from torch.nn.functional import softmax, sigmoid
from sklearn.preprocessing import MultiLabelBinarizer
from nlp2 import *
class BCEFocalLoss(nn.Module):
    """Binary cross-entropy focal loss (Lin et al., "Focal Loss for Dense
    Object Detection").

    Down-weights easy examples by the modulating factor ``(1 - pt) ** gamma``,
    where ``pt`` is the model's probability for the true class.

    Args:
        gamma: focusing parameter; ``gamma == 0`` reduces to plain BCE.
    """

    def __init__(self, gamma=2):
        super(BCEFocalLoss, self).__init__()
        # Bug fix: previously hard-coded to 2, silently ignoring the argument.
        self.gamma = gamma

    def forward(self, input, target):
        """Compute the mean focal loss.

        Args:
            input: raw logits (any shape).
            target: binary targets, same shape as ``input``.

        Returns:
            Scalar tensor with the mean focal loss.
        """
        bce_loss = F.binary_cross_entropy_with_logits(input, target, reduction='none')
        pt = torch.exp(-bce_loss)  # prevents nans when probability 0
        focal_loss = (1 - pt) ** self.gamma * bce_loss
        return focal_loss.mean()
class BCEGWLoss(nn.Module):
    """Gaussian-weighted binary cross-entropy-with-logits loss.

    Each element's BCE loss is re-weighted by a Gaussian bump evaluated at
    ``sigmoid(loss)`` (a quirk of the original formulation, kept as-is).
    """

    def __init__(self):
        super(BCEGWLoss, self).__init__()

    def gaussian(self, x, mean=0.5, variance=0.25):
        """In-place elementwise Gaussian: x[i] <- exp(-(x[i]-mean)^2 / (2*var^2)).

        Mutates ``x`` and returns the same tensor (expects a 1-D tensor).
        """
        denom = 2.0 * variance ** 2
        flat = x.data
        for idx in range(len(flat)):
            flat[idx] = math.exp(-(flat[idx] - mean) ** 2 / denom)
        return x

    def forward(self, input, target):
        raw = F.binary_cross_entropy_with_logits(input, target, reduction='none')
        raw = raw.view(-1)
        gate = sigmoid(raw)  # prevents nans when probability 0
        weight = self.gaussian(gate, variance=0.1 * math.exp(1), mean=0.5)
        # NOTE: gaussian() mutates its argument in place, so `weight` and
        # `gate` are the same (already transformed) tensor — this matches the
        # original left-to-right evaluation order exactly.
        per_elem = (weight - 0.1 * gate) * raw
        return per_elem.mean()
class GWLoss(nn.Module):
    """Gaussian-weighted cross-entropy loss for single-label tasks.

    The standard NLL term is re-weighted by a Gaussian bump centred on
    ``pt == 0.5``, emphasising uncertain predictions.
    """

    def __init__(self):
        super(GWLoss, self).__init__()

    def gaussian(self, x, mean=0.5, variance=0.25):
        """Overwrite each element of ``x`` with its Gaussian weight.

        NOTE: mutates ``x`` element-wise in place and returns the same
        tensor object.
        """
        for i, v in enumerate(x.data):
            x[i] = math.exp(-(v - mean) ** 2 / (2.0 * variance ** 2))
        return x

    def forward(self, input, target):
        """Return the mean Gaussian-weighted NLL.

        Args:
            input: logits, shape (N, C) or (N, C, d1, d2, ...).
            target: int64 class indices, flattened to (N', 1).
        """
        # Flatten any trailing dims so input becomes (N', C).
        if input.dim() > 2:
            input = input.view(input.size(0), input.size(1), -1)
            input = input.transpose(1, 2)
            input = input.contiguous().view(-1, input.size(2))
        target = target.view(-1, 1)
        # Fix: pass dim explicitly. Calling log_softmax without dim is
        # deprecated and relied on an implicit heuristic (dim=1 for the
        # 2-D input we have at this point) — behavior is unchanged.
        logpt = F.log_softmax(input, dim=1)
        logpt = logpt.gather(1, target)
        logpt = logpt.view(-1)
        pt = Variable(logpt.data.exp())
        # gaussian() mutates pt in place, so the `0.1 * pt` term sees the
        # re-weighted values — preserved from the original implementation.
        loss = -1 * (self.gaussian(pt, variance=0.1 * math.exp(1), mean=0.5) - 0.1 * pt) * logpt
        return loss.mean()
class BertMtClassifier(nn.Module):
    """Multi-task text classifier built on a shared BERT encoder.

    One linear classification head is created per task in ``tasks_detail``;
    all heads share the same BERT backbone. Tasks whose name contains
    ``'multi_target'`` are treated as multi-label (sigmoid + focal BCE
    loss); all others as single-label (softmax + cross-entropy).

    NOTE(review): indentation reconstructed from a whitespace-mangled
    source — confirm nesting against version control.
    """

    def __init__(self, tasks_detail, bert_model="bert-base-chinese", dropout=0.2):
        """
        Args:
            tasks_detail: mapping of task name -> list of label strings.
            bert_model: pretrained identifier passed to
                ``BertTokenizer``/``BertModel.from_pretrained``.
            dropout: dropout probability applied to the pooled output.
        """
        super().__init__()
        self.device = 'cuda' if torch.cuda.is_available() else 'cpu'
        print('Using device:', self.device)
        self.tokenizer = BertTokenizer.from_pretrained(bert_model)
        self.bert = BertModel.from_pretrained(bert_model)
        self.dropout = nn.Dropout(dropout)
        self.loss_fct = nn.CrossEntropyLoss()  # single-label loss
        self.loss_fct_mt = BCEFocalLoss()  # multi-label loss
        # self.loss_fct = FocalLoss()
        # self.loss_fct = GWLoss()
        self.tasks = dict()  # task name -> index into classifier_list
        self.tasks_detail = tasks_detail  # task name -> label list
        self.classifier_list = nn.ModuleList()  # one Linear head per task
        for task, labels in tasks_detail.items():
            self.classifier_list.append(nn.Linear(self.bert.config.hidden_size, len(labels)).to(self.device))
            self.tasks[task] = len(self.classifier_list) - 1
        self.bert = self.bert.to(self.device)
        self.loss_fct = self.loss_fct.to(self.device)
        self.loss_fct_mt = self.loss_fct_mt.to(self.device)

    def forward(self, task, inputs, targets=None, eval=False):
        """Run one batch of (task, text) pairs.

        Args:
            task: sequence of task names, one per input string.
            inputs: sequence of raw text strings.
            targets: optional sequence of label strings; for multi-label
                tasks multiple labels are joined with "/".
            eval: when targets are given, also return the per-example
                probability dicts alongside the loss.

        Returns:
            Without targets: list of {label: probability} dicts.
            With targets: summed loss (plus the dicts when ``eval``).
        """
        result_logits = []
        result_labels = []
        result_item = []
        for id in range(len(inputs)):  # NOTE: shadows builtin id(); kept as-is
            task_id = self.tasks[task[id]]
            task_lables = self.tasks_detail[task[id]]
            # bert embedding
            input_token = self.tokenizer.tokenize("[CLS] " + inputs[id] + " [SEP]")
            token_input_id = self.tokenizer.convert_tokens_to_ids(input_token)
            # Long inputs are split into 256-token windows
            # (sliding_widows_larger_step comes from the nlp2 package).
            tokenized_input = []
            for i in sliding_widows_larger_step(token_input_id, 256):
                tokenized_input.append(torch.tensor([i], dtype=torch.long).to(self.device))
            # Encode every window and stack the pooled outputs; res starts
            # as "" as a cheap "not yet set" sentinel.
            res = ""
            for input_pic in tokenized_input:
                output = self.bert(input_pic)
                if isinstance(res, str):
                    res = output[1]
                else:
                    res = torch.cat((res, output[1]), dim=0)
            # Average the window embeddings into one document vector.
            res = torch.mean(res, 0, keepdim=True)
            pooled_output = self.dropout(res)
            # classifier
            classifier_output = self.classifier_list[task_id](pooled_output)
            logits = torch.sum(classifier_output, dim=0)  # NOTE(review): unused; kept as-is
            reshaped_logits = classifier_output.view(-1, len(task_lables))
            if targets is not None:
                target = targets[id]
                if 'multi_target' in task[id]:
                    # Multi-label: "/"-joined labels -> binary indicator row.
                    mlb = MultiLabelBinarizer(classes=task_lables)
                    tar = mlb.fit_transform([target.split("/")])
                    tokenize_label = torch.tensor(tar, dtype=torch.float).to(self.device)
                else:
                    # Single-label: label string -> class index.
                    tokenize_label = torch.tensor([task_lables.index(target)], dtype=torch.long).to(self.device)
                result_labels.append(tokenize_label)
                result_logits.append(reshaped_logits)
            # Probabilities for reporting: sigmoid per label (multi-label)
            # or softmax over labels (single-label).
            if 'multi_target' in task[id]:
                reshaped_logits = sigmoid(reshaped_logits)
            else:
                reshaped_logits = softmax(reshaped_logits)
            logit_prob = reshaped_logits[0].data.tolist()
            result_item.append(dict(zip(task_lables, logit_prob)))
        # output
        if targets is not None:
            # Sum per-example losses, picking the loss matching each task type.
            loss = 0
            for i in range(len(result_logits)):
                if 'multi_target' in task[i]:
                    loss += self.loss_fct_mt(result_logits[i], result_labels[i])
                else:
                    loss += self.loss_fct(result_logits[i], result_labels[i])
            if eval:
                return loss, result_item
            else:
                return loss
        else:
            return result_item
|
PypiClean
|
/horizon-23.2.0.tar.gz/horizon-23.2.0/doc/source/install/install-ubuntu.rst
|
================================
Install and configure for Ubuntu
================================
This section describes how to install and configure the dashboard
on the controller node.
The only core service required by the dashboard is the Identity service.
You can use the dashboard in combination with other services, such as
Image service, Compute, and Networking. You can also use the dashboard
in environments with stand-alone services such as Object Storage.
.. note::
This section assumes proper installation, configuration, and operation
of the Identity service using the Apache HTTP server and Memcached
service.
Install and configure components
--------------------------------
.. include:: note_configuration_vary_by_distribution.txt
1. Install the packages:
.. code-block:: console
# apt install openstack-dashboard
.. end
2. Edit the
``/etc/openstack-dashboard/local_settings.py``
file and complete the following actions:
* Configure the dashboard to use OpenStack services on the
``controller`` node:
.. path /etc/openstack-dashboard/local_settings.py
.. code-block:: python
OPENSTACK_HOST = "controller"
.. end
* In the Dashboard configuration section, allow your hosts to access
Dashboard:
.. path /etc/openstack-dashboard/local_settings.py
.. code-block:: python
ALLOWED_HOSTS = ['one.example.com', 'two.example.com']
.. end
.. note::
- Do not edit the ``ALLOWED_HOSTS`` parameter under the Ubuntu
configuration section.
- ``ALLOWED_HOSTS`` can also be ``['*']`` to accept all hosts. This
may be useful for development work, but is potentially insecure
and should not be used in production. See the
`Django documentation
<https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts>`_
for further information.
* Configure the ``memcached`` session storage service:
.. path /etc/openstack-dashboard/local_settings.py
.. code-block:: python
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': 'controller:11211',
}
}
.. end
.. note::
Comment out any other session storage configuration.
* Enable the Identity API version 3:
.. path /etc/openstack-dashboard/local_settings.py
.. code-block:: python
OPENSTACK_KEYSTONE_URL = "http://%s/identity/v3" % OPENSTACK_HOST
.. end
.. note::
If your keystone service runs on port 5000, include the port in the
URL as well, i.e.
OPENSTACK_KEYSTONE_URL = "http://%s:5000/identity/v3" % OPENSTACK_HOST
* Enable support for domains:
.. path /etc/openstack-dashboard/local_settings.py
.. code-block:: python
OPENSTACK_KEYSTONE_MULTIDOMAIN_SUPPORT = True
.. end
* Configure API versions:
.. path /etc/openstack-dashboard/local_settings.py
.. code-block:: python
OPENSTACK_API_VERSIONS = {
"identity": 3,
"image": 2,
"volume": 3,
}
.. end
* Configure ``Default`` as the default domain for users that you create
via the dashboard:
.. path /etc/openstack-dashboard/local_settings.py
.. code-block:: python
OPENSTACK_KEYSTONE_DEFAULT_DOMAIN = "Default"
.. end
* Configure ``user`` as the default role for
users that you create via the dashboard:
.. path /etc/openstack-dashboard/local_settings.py
.. code-block:: python
OPENSTACK_KEYSTONE_DEFAULT_ROLE = "user"
.. end
* If you chose networking option 1, disable support for layer-3
networking services:
.. path /etc/openstack-dashboard/local_settings.py
.. code-block:: python
OPENSTACK_NEUTRON_NETWORK = {
...
'enable_router': False,
'enable_quotas': False,
'enable_ipv6': False,
'enable_distributed_router': False,
'enable_ha_router': False,
'enable_fip_topology_check': False,
}
.. end
* Optionally, configure the time zone:
.. path /etc/openstack-dashboard/local_settings.py
.. code-block:: python
TIME_ZONE = "TIME_ZONE"
.. end
Replace ``TIME_ZONE`` with an appropriate time zone identifier.
For more information, see the `list of time zones
<https://en.wikipedia.org/wiki/List_of_tz_database_time_zones>`__.
3. Add the following line to
``/etc/apache2/conf-available/openstack-dashboard.conf`` if not included.
.. code-block:: none
WSGIApplicationGroup %{GLOBAL}
Finalize installation
---------------------
* Reload the web server configuration:
.. code-block:: console
# systemctl reload apache2.service
.. end
|
PypiClean
|
/noc-0.7(3).tar.gz/noc-0.7(3)/sa/apps/switchportvalidation/views.py
|
## NOC modules
from noc.lib.app.saapplication import SAApplication
##
## Reduce task for switchport validation
##
def switchport_validation_reduce(task):
    """Reduce task for switchport validation.

    Compares the VLANs configured on each switchport against the VLANs
    actually observed in the MAC address table, and builds a report of
    ports carrying VLANs with no observed MACs.
    """
    from noc.lib.app.simplereport import Report, TableSection, SectionRow
    from noc.lib.text import list_to_ranges

    switchports = {}  # object -> interface -> (description, set of vlans)
    macs = {}  # object -> interface -> set of vlans
    # Collect data from completed ("C") map tasks
    for mt in task.maptask_set.filter(status="C"):
        o = mt.managed_object
        if mt.map_script.endswith(".get_mac_address_table"):
            # Populate macs: VLANs seen per interface
            macs[o] = {}
            for m in mt.script_result:
                for i in m["interfaces"]:
                    if i not in macs[o]:
                        macs[o][i] = set()
                    macs[o][i].add(m["vlan_id"])
        elif mt.map_script.endswith(".get_switchport"):
            # Populate switchports: configured VLANs per active interface
            switchports[o] = {}
            for p in mt.script_result:
                if not p["status"]:
                    # Skip ports in shutdown
                    continue
                i = p["interface"]
                if i not in switchports[o]:
                    switchports[o][i] = (p.get("description", ""), set())
                if "untagged" in p and p["untagged"]:
                    switchports[o][i][1].add(p["untagged"])
                if p["tagged"]:
                    switchports[o][i][1].update(p["tagged"])
        else:
            raise Exception("Inconsistent map task")
    if not macs or not switchports:
        return "Failed to retrieve the data!!!"
    # Process data
    data = []
    for o in switchports:
        if o not in macs or not macs[o]:
            continue
        # Find inconsistent ports: (port, description, switchport vlans, excessive vlans)
        inconsistent_ports = []
        for i in switchports[o]:
            if i not in macs[o]:
                # No MAC data for port
                inconsistent_ports += [
                    (i, switchports[o][i][0], switchports[o][i][1], None)]
            else:
                # VLANs configured but never seen in the MAC table
                v = switchports[o][i][1] - macs[o][i]
                if v:
                    inconsistent_ports += [
                        (i, switchports[o][i][0], switchports[o][i][1], v)]
        # Add to data if inconsistent ports found
        if inconsistent_ports:
            data += [SectionRow(o.name)]
            # Fix: the original passed a cmp-style lambda positionally
            # (sorted(seq, cmp)), which only works on Python 2 and raises
            # TypeError on Python 3. An equivalent key function works on
            # both and sorts identically (by port name).
            data += [(p, d, list_to_ranges(v),
                      list_to_ranges(e) if e is not None else "No MACs found")
                     for p, d, v, e in
                     sorted(inconsistent_ports, key=lambda x: x[0])]
    #
    if not data:
        return "Failed to retrieve data!!!"
    # Build report
    r = Report()
    r.append_section(TableSection("", columns=["Port", "Description",
                                               "Switchport VLANs",
                                               "Excessive VLANs"], data=data))
    return r
##
##
##
class SwitchportValidationAppplication(SAApplication):
    """Service-activation application exposing the switchport validation report.

    NOTE(review): the class name keeps the original triple-"p" spelling
    ("Appplication") so any external references continue to resolve.
    """
    # Human-readable application title shown in the UI
    title = "Switchport Validation"
    # Menu path under which the application is registered
    menu = "Tasks | Switchport Validation"
    # Reduce-phase handler defined above in this module
    reduce_task = switchport_validation_reduce
    # Scripts executed on every selected managed object (map phase)
    map_task = ["get_mac_address_table", "get_switchport"]
|
PypiClean
|
/alcoholic_tfe22540-0.1.12.tar.gz/alcoholic_tfe22540-0.1.12/tfe22540/comportement.py
|
import seaborn as sns
import pandas as pd
import numpy as np
import xlsxwriter
import matplotlib.patches as mpatches
import matplotlib.pyplot as plt
from tfe22540.perso_path import perso_path_string, patient_number_list
perso_path, excel_path, subjects_path, patients_path, plot_path, atlas_path = perso_path_string()
def get_comportements(file_path, patient_list):
    """Collect the behavioural-data row of every requested patient.

    Parameters
    ----------
    file_path : str
        Excel file containing General_data or behavioural data (provided
        by Melissa); one row per patient, keyed by the "Numéro" column.
    patient_list : list
        Patient numbers to extract, in the desired output order.

    Returns
    -------
    list
        One row (numpy array) per matched patient, summarizing the
        important behavioural informations.
    """
    sheet = pd.read_excel(file_path)
    rows = sheet.to_numpy()
    numbers = sheet["Numéro"]
    selected = []
    for wanted in patient_list:
        for row_idx in range(len(numbers)):
            if wanted == numbers[row_idx]:
                selected.append(rows[row_idx, :])
    return selected
# Module-level side effect: resolve the patient numbers and load each
# patient's behavioural row from the shared workbook at import time.
patient_number = patient_number_list("int_nb")
patients_all = get_comportements(excel_path + "Comportements_data.xlsx", patient_number)
def comportement_to_excel(patients_all, only_percentage):
    """
    Write one Excel worksheet per patient summarising behavioural data.

    Parameters
    ----------
    patients_all : List returned by get_comportements() function
    only_percentage : bool
        If True, export only the "Percentage ..." metrics (to
        Behavioural_data_only_percentage.xlsx); otherwise export all
        metrics (to Behavioural_data.xlsx).

    Returns
    -------
    None. Excel file summarizing information contained in patients_all.

    NOTE(review): indentation reconstructed from a whitespace-mangled
    source — confirm nesting against version control.
    """
    if (only_percentage == False):
        workbook = xlsxwriter.Workbook(excel_path + 'Behavioural_data.xlsx')
    else:
        workbook = xlsxwriter.Workbook(excel_path + 'Behavioural_data_only_percentage.xlsx')
    # Shallow copy: the row arrays themselves are still mutated below
    # (placeholder values are rewritten to 0).
    patient_data = patients_all.copy()
    for i in patient_data:
        # Single-digit patient numbers get a leading zero in the sheet name.
        if(i[0] == 2 or i[0] == 4 or i[0] == 5 or i[0] == 8 or i[0] == 9):
            worksheet = workbook.add_worksheet("0"+str(int(i[0])))
        else:
            worksheet = workbook.add_worksheet(str(int(i[0])))
        # Row labels written to column A, matching the source columns.
        if (only_percentage == False):
            list_data = ["Unités", "Osmolalité", "T1_BDI", "T1_OCDS_MODIFIE_Total", "T1_OCDS_Obsessions", "T1_OCDS_Compulsions", "T1_STAI_YA", "T1_MFI",
                         "T2_Bearni", "T2_BDI", "T2_OCDS_MODIFIE_Total", "T2_OCDS_Obsessions", "T2_OCDS_Compulsions", "T2_STAI_YA", "T2_MFI",
                         "Percentage BDI", "Percentage OCDS_MODIFIE_Total", "Percentage OCDS_Obsessions", "Percentage OCDS_Compulsions", "Percentage STAI_YA", "Percentage MFI"]
        else:
            list_data = ["Percentage BDI", "Percentage OCDS_MODIFIE_Total", "Percentage OCDS_Obsessions", "Percentage OCDS_Compulsions", "Percentage STAI_YA", "Percentage MFI"]
        azer = 2  # first Excel row to write (row 2 = cells A2/B2)
        # Column offset of the first exported metric within the source row.
        if (only_percentage == False):
            start = 1
        else:
            start = 16
        for j,k in zip(range(start,len(i)),list_data):
            worksheet.write('A'+str(azer), k)
            # Replace placeholders with 0: NaN for numeric cells, the "/"
            # marker otherwise.
            # NOTE(review): nesting reconstructed — presumably valid
            # numbers pass through unchanged; confirm against VCS.
            if(i[j] != "/"):
                if(np.isnan(i[j])==True):
                    i[j] = 0
            else:
                i[j] = 0
            worksheet.write('B'+str(azer), i[j])
            azer += 1
    workbook.close()
# Module-level side effect: write both flavours of the behavioural workbook
# at import time.
comportement_to_excel(patients_all, False)
comportement_to_excel(patients_all, True)
# Patient numbers per cluster (cluster assignments computed elsewhere).
cluster0 = ['05','26','35','36','37','40','41']
cluster1 = ['02', '04', '08', '09', '11', '12', '14', '15', '18', '20', '21', '22', '24', '27', '28', '30', '34', '39', '42', '45']
cluster2 = ['13', '17', '19', '31', '32', '33', '43', '46']
clusters = [cluster0,cluster1,cluster2]
# Row labels in the order they appear in each patient sheet written above.
list_data = ["Unités", "Osmolalité", "T1_BDI", "T1_OCDS_MODIFIE_Total", "T1_OCDS_Obsessions", "T1_OCDS_Compulsions", "T1_STAI_YA", "T1_MFI",
             "T2_Bearni", "T2_BDI", "T2_OCDS_MODIFIE_Total", "T2_OCDS_Obsessions", "T2_OCDS_Compulsions", "T2_STAI_YA", "T2_MFI",
             "Percentage BDI", "Percentage OCDS_MODIFIE_Total", "Percentage OCDS_Obsessions", "Percentage OCDS_Compulsions", "Percentage STAI_YA", "Percentage MFI"]
# One colour / legend label per cluster for all scatter plots below.
color = ["blue", "orange", "#008000"]
labels = ["Cluster 0", "Cluster 1", "Cluster 2"]
# --- Per-metric T2-T1 difference scatter plots (module-level script) ---
# For each of the 6 behavioural metrics, read every patient's sheet and plot
# the T2-T1 score difference, one horizontal band per cluster.
# NOTE(review): indentation reconstructed from a whitespace-mangled source.
for k in range(6): #6
    # data = [row index of the T1 score, row index of the T2 score] for
    # metric k in the sheets written by comportement_to_excel().
    for data in [[2+k,9+k]]:
        to_plot_cluster = []
        pops = []  # legend patches, one per cluster
        fig = plt.figure(figsize=(14.5,8))
        j = 0  # vertical position of the current cluster's band
        for cluster, couleur, nom_clust in zip(clusters, color, labels):
            to_plot = []
            for i in cluster:
                # One sheet per patient in the behavioural workbook.
                workbook = pd.read_excel(excel_path + 'Behavioural_data.xlsx', sheet_name=i)
                worksheet = workbook.to_numpy()
                if k < 6:
                    # Always true for k in range(6); the else branch is
                    # dead code from an earlier wider loop, kept as-is.
                    to_plot.append(worksheet[data[1],1]-worksheet[data[0],1])
                else:
                    to_plot.append(worksheet[data[1],1])
            y = np.ones(len(to_plot))*j
            plt.scatter(to_plot, y, s=400,color=couleur,alpha=0.5)
            pop_a = mpatches.Patch(color=couleur, label=nom_clust)
            pops.append(pop_a)
            j += 1
        # Metric name derived from the last sheet read (e.g. "T2_BDI" -> "BDI").
        new_name = worksheet[data[1],0].replace('T2_', '')
        new_name = new_name.replace('_', ' ')
        plt.axvline(x=0,linestyle="--",color="gray")
        plt.title("Difference of "+ new_name + " between T2 and T1")
        plt.xlabel("Score [/]")
        plt.legend(handles=pops)
        plt.yticks([])
        fig.tight_layout()
        fig.savefig(plot_path + "[Behavior] - Difference of "+ new_name + " between E2 and E1.pdf")
# --- Alcohol consumption before E1 ---
# Scatter of row 0 ("Unités") of each patient sheet, one band per cluster.
fig = plt.figure(figsize=(14.5,8))
j = 0  # vertical position of the current cluster's band
to_plot_cluster = []
pops = []  # legend patches, one per cluster
for cluster, couleur, nom_clust in zip(clusters, color, labels):
    print(cluster)
    to_plot = []
    for i in cluster:
        workbook = pd.read_excel(excel_path + 'Behavioural_data.xlsx', sheet_name=i)
        worksheet = workbook.to_numpy()
        to_plot.append(worksheet[0,1])  # row 0 = "Unités" (alcohol units)
    y = np.ones(len(to_plot))*j
    plt.scatter(to_plot, y, s=400,color=couleur,alpha=0.5)
    pop_a = mpatches.Patch(color=couleur, label=nom_clust)
    pops.append(pop_a)
    j += 1
plt.title("Alcohol consumption during the week before E1")
plt.xlabel("Alcohol unit [/]")
plt.axvline(x=0,linestyle="--",color="gray")
plt.legend(handles=pops)
plt.yticks([])
fig.tight_layout()
fig.savefig(plot_path + "[Behavior] - Alcohol consumption during the week before E1.pdf")
# --- Percentage change between E2 and E1 for four metrics ---
# Rows 15/16/19/20 of each patient sheet hold the "Percentage ..." values
# for BDI, OCDS total, STAI YA and MFI. Clusters 0 and 2 are offset
# vertically by +/-0.06 so overlapping points remain readable.
to_plot_cluster = []
pops = []  # legend patches, one per cluster
fig = plt.figure(figsize=(14.5,8))
for cluster, couleur, nom_clust in zip(clusters, color, labels):
    to_plot0 = []
    to_plot1 = []
    to_plot2 = []  # unused, kept as-is
    to_plot3 = []  # unused, kept as-is
    to_plot4 = []
    to_plot5 = []
    for i in cluster:
        workbook = pd.read_excel(excel_path + 'Behavioural_data.xlsx', sheet_name=i)
        worksheet = workbook.to_numpy()
        to_plot0.append(worksheet[15,1])  # Percentage BDI
        to_plot1.append(worksheet[16,1])  # Percentage OCDS total
        to_plot4.append(worksheet[19,1])  # Percentage STAI YA
        to_plot5.append(worksheet[20,1])  # Percentage MFI
    # Base y positions: one quarter-step per metric (cluster 1 stays here).
    y0 = np.ones(len(to_plot0))*(1/4)
    y1 = np.ones(len(to_plot1))*(2/4)
    y4 = np.ones(len(to_plot4))*(3/4)
    y5 = np.ones(len(to_plot5))*(4/4)
    if (nom_clust == "Cluster 0"):
        # Shift cluster 0 slightly above the metric's base line.
        y0 = np.ones(len(to_plot0))*(1/4+0.06)
        y1 = np.ones(len(to_plot1))*(2/4+0.06)
        y4 = np.ones(len(to_plot4))*(3/4+0.06)
        y5 = np.ones(len(to_plot5))*(4/4+0.06)
    elif (nom_clust == "Cluster 2"):
        # Shift cluster 2 slightly below the metric's base line.
        y0 = np.ones(len(to_plot0))*(1/4-0.06)
        y1 = np.ones(len(to_plot1))*(2/4-0.06)
        y4 = np.ones(len(to_plot4))*(3/4-0.06)
        y5 = np.ones(len(to_plot5))*(4/4-0.06)
    plt.scatter(to_plot0, y0, s=400, color=couleur, alpha=0.5)
    plt.scatter(to_plot1, y1, s=400, color=couleur, alpha=0.5)
    plt.scatter(to_plot4, y4, s=400, color=couleur, alpha=0.5)
    plt.scatter(to_plot5, y5, s=400, color=couleur, alpha=0.5)
    pop_a = mpatches.Patch(color=couleur, label=nom_clust)
    pops.append(pop_a)
plt.title("Percentage change between E2 and E1")
plt.xlabel("Variation [%]")
plt.legend(handles=pops)
plt.ylim(0.0,5/4)
plt.yticks(ticks=[1/4,2/4,3/4,4/4],labels=["BDI", "OCDS - Total", "STAI YA", "MFI"])
fig.tight_layout()
fig.savefig(plot_path + "[Behavior] - Percentage change between E2 and E1.pdf")
# --- Violin plots of the behavioural metrics ---
# Data prepared separately in violin_plot.xlsx: sheet "1" holds the
# metric differences, sheet "2" the alcohol-consumption values.
workbook = pd.read_excel(excel_path + "violin_plot.xlsx", sheet_name="1")
workbook1 = pd.read_excel(excel_path + "violin_plot.xlsx", sheet_name="2")
# Alternative palette note: #37483E and black
fig, ax = plt.subplots(2,1,figsize = (20,16),gridspec_kw={'height_ratios': [1,3]})
# Top panel: alcohol consumption before E1.
# NOTE(review): stripplot's split= kwarg is deprecated in newer seaborn
# (renamed dodge=) — confirm the pinned seaborn version supports it.
sns.violinplot(y="Behavioral metrics", x="[/]", hue=" ",palette = ["navajowhite", "#56af59ff"],data=workbook1, split=True, ax=ax[0])
sns.stripplot(y="Behavioral metrics", x="[/]", hue=" ",data=workbook1, palette = ["orange", "#008000"], jitter = 0, split=True, ax=ax[0])
ax[0].set_title("Alcohol consumption during the week before E1",fontsize=25)
ax[0].set_ylabel("",fontsize = 0)
ax[0].set_xlabel("Alcohol unit [/]",fontsize = 23)
ax[0].set_xlim(-170,70)
ax[0].tick_params(axis='x', labelsize= 21)
ax[0].tick_params(axis='y', labelsize= 21)
ax[0].axvline(x=0,linestyle="--",color="gray")
# Bottom panel: E2-E1 differences for all behavioural metrics.
sns.violinplot(y="Behavioral metrics", x="[/]", hue=" ",palette = ["navajowhite", "#56af59ff"], data=workbook, split=True, ax=ax[1])
sns.stripplot(y="Behavioral metrics", x="[/]", hue=" ",data=workbook, palette = ["orange", "#008000"], jitter = 0, split=True, ax=ax[1])
ax[1].set_title("Difference of behavioral metrics between E2 and E1",fontsize=25)
ax[1].set_ylabel("",fontsize = 0)
ax[1].set_xlabel("Score [/]",fontsize = 23)
ax[1].tick_params(axis='x', labelsize= 21)
ax[1].tick_params(axis='y', labelsize= 21)
ax[1].axvline(x=0,linestyle="--",color="gray")
ax[1].set_xlim(-170,70)
fig.supylabel("Behavioral metrics",fontsize = 23)
fig.tight_layout()
fig.savefig(plot_path + "[Behavior] - Violin plot.pdf")
|
PypiClean
|
/changelly-0.11.tar.gz/changelly-0.11/README.md
|
## Changelly Exchange API Client
[](https://www.gnu.org/licenses/lgpl-3.0)

[](http://makeapullrequest.com)
This module helps you interact with the Changelly API easily from your Python 3 applications.
## Installation
Use pip to install the module:
```
pip install changelly
```
## Basic Usage
Initialize changelly client object
```
from Changelly.changelly import ChangellyApi
client = ChangellyApi('apikey','apisecret')
params={
"from": "eth",
"to": "btc",
"amount": "1"
}
x=client.getExchangeAmount('1',**params)
print(x)
```
## Documentation
* The Official API documentation can be found [here](https://api-docs.changelly.com/).
## Contributing
Feel free to contribute to this project.
|
PypiClean
|
/dnsdb2-1.1.4.tar.gz/dnsdb2-1.1.4/README.md
|
# Farsight DNSDB Version 2 with Flexible Search SDK for Python
[Farsight Security DNSDB®](https://www.farsightsecurity.com/solutions/dnsdb/) is the world’s largest DNS intelligence database that provides a unique, fact-based, multifaceted view of the configuration of the global Internet infrastructure. DNSDB leverages the richness of Farsight’s Security Information Exchange (SIE) data-sharing platform and is engineered and operated by leading DNS experts. Farsight collects Passive DNS data from its global sensor array. It then filters and verifies the DNS transactions before inserting them into the DNSDB, along with ICANN-sponsored zone file access download data. The end result is the highest-quality and most comprehensive DNS intelligence data service of its kind - with more than 100 billion DNS records since 2010.
This software development kit for Python 3 implements all features of the [DNSDB Version 2](https://docs.dnsdb.info/dnsdb-apiv2/) with Flexible Search API.
## Requirements
- Python 3.6 or greater.
- [Python requests](http://python-requests.org).
- [Requests mock](https://pypi.org/project/requests-mock/) for running the test suite.
- A [DNSDB API key](https://www.farsightsecurity.com/solutions/dnsdb/).
To purchase DNSDB, please complete the [application form](https://www.farsightsecurity.com/order-form/). Our due diligence process requires that you provide answers for all required fields in the application. We must be able to positively establish your identity and projected use case, so your cooperation in completing this information will be greatly appreciated and expedite the approval process. Once your application is completed, Farsight Security will review and respond to your request within two business days.
DNSDB Free 30-day Trial Key: Farsight’s [API Key portability program](https://www.farsightsecurity.com/trial-api/) lets you unlock the power of DNS intelligence across dozens of SIEM, Orchestration, Automation and Threat Intelligence Platforms that already support Farsight's DNSDB RESTful API.
## Examples
Import the dnsdb2 library and configure a client.
```python
import dnsdb2
client = dnsdb2.Client(apikey, swclient="yourappname", version="v0.0")
```
Perform a flex regex search for `farsight`. This manually suppresses `QueryLimited` exceptions raised by the server if the query results exceed the row limited.
```python
results = list(client.flex_rdata_regex('farsight', ignore_limited=True))
```
Lookup rrsets for `*.dnsdb.info` with rrtype `A`.
```python
results = list(client.lookup_rrset("*.dnsdb.info", rrtype='A', ignore_limited=True))
```
Summarize rdata records for `104.244.14.0/24` seen within the past 90 days.
```python
results = next(client.summarize_rdata_ip("104.244.14.0/24", time_last_after=-60*60*24*90, ignore_limited=True))
```
Iterate through a large result set by re-issuing queries with increasing offsets after `QueryLimited` is raised.
```python
limit = 1000
offset = 0
results = list()
while True:
try:
for res in client.lookup_rrset("farsightsecurity.com", limit=limit, offset=offset):
results.append(res)
except dnsdb2.QueryLimited:
offset += limit
else:
break
```
## API Documentation
https://docs.dnsdb.info/dnsdb-apiv2/
https://docs.dnsdb.info/dnsdb-flex/
### Table of Contents
* [Client](#dnsdb2.Client)
* [ping](#dnsdb2.Client.ping)
* [rate\_limit](#dnsdb2.Client.rate_limit)
* [lookup\_rrset](#dnsdb2.Client.lookup_rrset)
* [summarize\_rrset](#dnsdb2.Client.summarize_rrset)
* [lookup\_rdata\_name](#dnsdb2.Client.lookup_rdata_name)
* [summarize\_rdata\_name](#dnsdb2.Client.summarize_rdata_name)
* [lookup\_rdata\_ip](#dnsdb2.Client.lookup_rdata_ip)
* [summarize\_rdata\_ip](#dnsdb2.Client.summarize_rdata_ip)
* [lookup\_rdata\_raw](#dnsdb2.Client.lookup_rdata_raw)
* [summarize\_rdata\_raw](#dnsdb2.Client.summarize_rdata_raw)
* [flex\_rrnames\_regex](#dnsdb2.Client.flex_rrnames_regex)
* [flex\_rrnames\_glob](#dnsdb2.Client.flex_rrnames_glob)
* [flex\_rdata\_regex](#dnsdb2.Client.flex_rdata_regex)
* [flex\_rdata\_glob](#dnsdb2.Client.flex_rdata_glob)
* [DnsdbException](#dnsdb2.DnsdbException)
* [AccessDenied](#dnsdb2.AccessDenied)
* [OffsetError](#dnsdb2.OffsetError)
* [QuotaExceeded](#dnsdb2.QuotaExceeded)
* [ConcurrencyExceeded](#dnsdb2.ConcurrencyExceeded)
* [QueryError](#dnsdb2.QueryError)
* [QueryFailed](#dnsdb2.QueryFailed)
* [QueryLimited](#dnsdb2.QueryLimited)
* [QueryTruncated](#dnsdb2.QueryTruncated)
* [ProtocolError](#dnsdb2.ProtocolError)
<a name="dnsdb2.Client"></a>
### Client Objects
```
| dnsdb2.Client(apikey: str, server: str = 'https://api.dnsdb.info',
| swclient: str = 'dnsdb2-py', version: str = '0.0',
| proxies: Dict[str, str] = None, insecure: bool = False)
| A client for DNSDB protocol version 2 with Flex Search.
|
| Args:
| apikey (str): A DNSDB API key
| server (str): The DNSDB API server endpoint
| swclient (str): The name of the client software reported to DNSDB.
| version (str): The version of the software reported to DNSDB.
| proxies (Dict[str, str]): HTTP proxies to use. Mapping of protocol to URL.
| insecure (bool): Skip https validation.
```
<a name="dnsdb2.Client.ping"></a>
#### ping
```
| ping(self) -> bool
| Tests end to end connectivity tests to the DNSDB API endpoint, letting
| you know that there are no firewall blockages.
```
<a name="dnsdb2.Client.rate_limit"></a>
#### rate\_limit
```
| rate_limit(self) -> dict
| Retrieves quota information as described in the DNSDB API v2 documentation.
```
<a name="dnsdb2.Client.lookup_rrset"></a>
#### lookup\_rrset
```
| lookup_rrset = f(self, owner_name: str, rrtype: str = None, bailiwick: str = None, ignore_limited: bool = False, **params)
| Executes a lookup rrset query.
|
| Args:
| owner_name (str): A DNS owner name in presentation format or wildcards.
|
| Wildcards are one of two forms: a left-hand (*.example.com) or
| right-hand (www.example.*) wildcard domain name. An owner name with a
| leading asterisk and label separator, (i.e., *.) will perform a
| wildcard search for any RRsets whose owner names end with the given
| domain name. An owner name with a trailing label separator and asterisk
| (i.e., .*) will perform a wildcard search for any RRsets whose owner
| names start with the given label(s). Note that left-hand wildcard
| queries are somewhat more expensive and slower than right-hand wildcard
| queries.
| rrtype (str): a DNS RRtype mnemonic.
|
| bailiwick (str): A DNS bailiwick in presentation format or wildcards.
| time_first_before (int): provide results before the defined timestamp for
| when the DNS record was first observed. For example, the URL parameter
| “time_first_before=1420070400” will only provide matching DNS records
| that were first observed before (or older than) January 1, 2015.
|
| time_first_after (int): provide results after the defined timestamp for when
| the DNS record was first observed. For example, the URL parameter
| “time_first_after=-31536000” will only provide results that were first
| observed within the last year.
|
| time_last_before (int): provide results before the defined timestamp for
| when the DNS record was last observed. For example, the URL parameter
| “time_last_before=1356998400” will only provide results for DNS records
| that were last observed before 2013.
|
| time_last_after (int): provide results after the defined timestamp for when
| the DNS record was last observed. For example, the URL parameter
| “time_last_after=-2678400” will only provide results that were last
| observed after 31 days ago.
|
| limit (int): Limit for the number of results returned via these lookup
| methods. There is a built-in limit to the number of results that are
| returned via these lookup methods. The default limit is set at 10,000.
| This limit can be raised or lowered by setting the “limit” query
| parameter.
|
| There is also a maximum number of results allowed; requesting a limit
| greater than the maximum will only return the maximum. See results_max
| below for information on that maximum. If “?limit=0” is used then DNSDB
| will return the maximum number of results allowed. Obviously, if there
| are less results for the query than the requested limit, only the actual
| amount can be returned.
|
| id (str): Client software specific identity of the user of the API client.
| Comprised of an alphanumeric string, a colon, and an alphanumeric
| string, limited to thirty characters. This may be logged by the DNSDB
| API server.
|
| aggr (bool): Aggregated results group identical rrsets across all time
| periods and is the classic behavior from querying the DNSDB. This means
| you could get the total number of times an rrset has been observed, but
| not when it was observed. Unaggregated results ungroup identical rrsets,
| allowing you to see how the domain name was resolved in the DNS across
| the full-time range covered in DNSDB (subject to time fencing). This can
| give a more accurate impression of record request volume across time
| because it will reveal the distinct timestamps of records whose values
| are repeated. You can answer questions like, “Was a domain parked for a
| long time, mostly unused, until it was repurposed for serving malware or
| relaying spam, but then was abandoned again?” It allows you to see if a
| record was observed heavily in the last week vs. having been observed
| constantly for years.
|
| humantime (bool): A value that is True if time values (in time_first,
| time_last, zone_time_first, zone_time_last) should be returned in human
| readable (RFC3339 compliant) format or False if Unix-style time values
| in seconds since the epoch should be returned. False is the classic
| behavior from querying the DNSDB and is the default value for this
| option.
|
| ignore_limited(bool): Suppress QueryLimited exceptions.
|
| offset (int): How many rows to offset (e.g. skip) in the results.
| This implements an incremental result transfer feature, allowing you to
| view more of the available results for a single query. The rows are
| offset prior to the limit parameter being applied, therefore offset
| allows seeing additional results past a limit that matches the maximum
| number of results. Note that DNSDB recalculates the results for each
| query and the order of results might not be preserved. Therefore, this
| capability is not a valid way to walk all results over multiple queries
| – some results might be missing and some might be duplicated. The actual
| offset that can be used is limited or for certain API keys, offset is
| not allowed – see the offset_max rate_limit key below.
```
<a name="dnsdb2.Client.summarize_rrset"></a>
#### summarize\_rrset
```
| summarize_rrset = f(self, owner_name: str, rrtype: str = None, bailiwick: str = None, ignore_limited: bool = False, **params)
| Executes a summarize rrset query.
|
| Args:
| owner_name (str): A DNS owner name in presentation format or wildcards.
|
| Wildcards are one of two forms: a left-hand (*.example.com) or
| right-hand (www.example.*) wildcard domain name. An owner name with a
| leading asterisk and label separator, (i.e., *.) will perform a
| wildcard search for any RRsets whose owner names end with the given
| domain name. An owner name with a trailing label separator and asterisk
| (i.e., .*) will perform a wildcard search for any RRsets whose owner
| names start with the given label(s). Note that left-hand wildcard
| queries are somewhat more expensive and slower than right-hand wildcard
| queries.
| rrtype (str): a DNS RRtype mnemonic.
|
| bailiwick (str): A DNS bailiwick in presentation format or wildcards.
| time_first_before (int): provide results before the defined timestamp for
| when the DNS record was first observed. For example, the URL parameter
| “time_first_before=1420070400” will only provide matching DNS records
| that were first observed before (or older than) January 1, 2015.
|
| time_first_after (int): provide results after the defined timestamp for when
| the DNS record was first observed. For example, the URL parameter
| “time_first_after=-31536000” will only provide results that were first
| observed within the last year.
|
| time_last_before (int): provide results before the defined timestamp for
| when the DNS record was last observed. For example, the URL parameter
| “time_last_before=1356998400” will only provide results for DNS records
| that were last observed before 2013.
|
| time_last_after (int): provide results after the defined timestamp for when
| the DNS record was last observed. For example, the URL parameter
| “time_last_after=-2678400” will only provide results that were last
| observed after 31 days ago.
|
| limit (int): Limit for the number of results returned via these lookup
| methods. There is a built-in limit to the number of results that are
| returned via these lookup methods. The default limit is set at 10,000.
| This limit can be raised or lowered by setting the “limit” query
| parameter.
|
| There is also a maximum number of results allowed; requesting a limit
| greater than the maximum will only return the maximum. See results_max
| below for information on that maximum. If “?limit=0” is used then DNSDB
| will return the maximum number of results allowed. Obviously, if there
|             are fewer results for the query than the requested limit, only the actual
| amount can be returned.
|
| id (str): Client software specific identity of the user of the API client.
| Comprised of an alphanumeric string, a colon, and an alphanumeric
| string, limited to thirty characters. This may be logged by the DNSDB
| API server.
|
| aggr (bool): Aggregated results group identical rrsets across all time
| periods and is the classic behavior from querying the DNSDB. This means
| you could get the total number of times an rrset has been observed, but
| not when it was observed. Unaggregated results ungroup identical rrsets,
| allowing you to see how the domain name was resolved in the DNS across
| the full-time range covered in DNSDB (subject to time fencing). This can
| give a more accurate impression of record request volume across time
| because it will reveal the distinct timestamps of records whose values
| are repeated. You can answer questions like, “Was a domain parked for a
| long time, mostly unused, until it was repurposed for serving malware or
| relaying spam, but then was abandoned again?” It allows you to see if a
| record was observed heavily in the last week vs. having been observed
| constantly for years.
|
| humantime (bool): A value that is True if time values (in time_first,
| time_last, zone_time_first, zone_time_last) should be returned in human
| readable (RFC3339 compliant) format or False if Unix-style time values
| in seconds since the epoch should be returned. False is the classic
| behavior from querying the DNSDB and is the default value for this
| option.
|
|         ignore_limited (bool): Suppress QueryLimited exceptions.
|
| max_count (int): max_count controls stopping when we reach that summary
| count. The resulting total count can exceed max_count as it will include
| the entire count from the last rrset examined.
|
| The default is to not constrain the count.
```
<a name="dnsdb2.Client.lookup_rdata_name"></a>
#### lookup\_rdata\_name
```
| lookup_rdata_name = f(self, name: str, rrtype: str = None, ignore_limited: bool = False, **params)
|     Executes a lookup rdata name query.
|
| Args:
| name (str): a DNS domain name in presentation format, or a left-hand
| (`.example.com`) or right-hand (`www.example.`) wildcard domain name.
| Note that left-hand wildcard queries are somewhat more expensive than
| right-hand wildcard queries.
| rrtype (str): a DNS RRtype mnemonic.
|
| time_first_before (int): provide results before the defined timestamp for
| when the DNS record was first observed. For example, the URL parameter
| “time_first_before=1420070400” will only provide matching DNS records
| that were first observed before (or older than) January 1, 2015.
|
| time_first_after (int): provide results after the defined timestamp for when
| the DNS record was first observed. For example, the URL parameter
| “time_first_after=-31536000” will only provide results that were first
| observed within the last year.
|
| time_last_before (int): provide results before the defined timestamp for
| when the DNS record was last observed. For example, the URL parameter
| “time_last_before=1356998400” will only provide results for DNS records
| that were last observed before 2013.
|
| time_last_after (int): provide results after the defined timestamp for when
| the DNS record was last observed. For example, the URL parameter
| “time_last_after=-2678400” will only provide results that were last
| observed after 31 days ago.
|
| limit (int): Limit for the number of results returned via these lookup
| methods. There is a built-in limit to the number of results that are
| returned via these lookup methods. The default limit is set at 10,000.
| This limit can be raised or lowered by setting the “limit” query
| parameter.
|
| There is also a maximum number of results allowed; requesting a limit
| greater than the maximum will only return the maximum. See results_max
| below for information on that maximum. If “?limit=0” is used then DNSDB
| will return the maximum number of results allowed. Obviously, if there
|             are fewer results for the query than the requested limit, only the actual
| amount can be returned.
|
| id (str): Client software specific identity of the user of the API client.
| Comprised of an alphanumeric string, a colon, and an alphanumeric
| string, limited to thirty characters. This may be logged by the DNSDB
| API server.
|
| aggr (bool): Aggregated results group identical rrsets across all time
| periods and is the classic behavior from querying the DNSDB. This means
| you could get the total number of times an rrset has been observed, but
| not when it was observed. Unaggregated results ungroup identical rrsets,
| allowing you to see how the domain name was resolved in the DNS across
| the full-time range covered in DNSDB (subject to time fencing). This can
| give a more accurate impression of record request volume across time
| because it will reveal the distinct timestamps of records whose values
| are repeated. You can answer questions like, “Was a domain parked for a
| long time, mostly unused, until it was repurposed for serving malware or
| relaying spam, but then was abandoned again?” It allows you to see if a
| record was observed heavily in the last week vs. having been observed
| constantly for years.
|
| humantime (bool): A value that is True if time values (in time_first,
| time_last, zone_time_first, zone_time_last) should be returned in human
| readable (RFC3339 compliant) format or False if Unix-style time values
| in seconds since the epoch should be returned. False is the classic
| behavior from querying the DNSDB and is the default value for this
| option.
|
|         ignore_limited (bool): Suppress QueryLimited exceptions.
|
| offset (int): How many rows to offset (e.g. skip) in the results.
| This implements an incremental result transfer feature, allowing you to
| view more of the available results for a single query. The rows are
| offset prior to the limit parameter being applied, therefore offset
| allows seeing additional results past a limit that matches the maximum
| number of results. Note that DNSDB recalculates the results for each
| query and the order of results might not be preserved. Therefore, this
| capability is not a valid way to walk all results over multiple queries
| – some results might be missing and some might be duplicated. The actual
| offset that can be used is limited or for certain API keys, offset is
| not allowed – see the offset_max rate_limit key below.
```
<a name="dnsdb2.Client.summarize_rdata_name"></a>
#### summarize\_rdata\_name
```
| summarize_rdata_name = f(self, name: str, rrtype: str = None, ignore_limited: bool = False, **params)
|     Executes a summarize rdata name query.
|
| Args:
| name (str): a DNS domain name in presentation format, or a left-hand
| (`.example.com`) or right-hand (`www.example.`) wildcard domain name.
| Note that left-hand wildcard queries are somewhat more expensive than
| right-hand wildcard queries.
| rrtype (str): a DNS RRtype mnemonic.
|
| time_first_before (int): provide results before the defined timestamp for
| when the DNS record was first observed. For example, the URL parameter
| “time_first_before=1420070400” will only provide matching DNS records
| that were first observed before (or older than) January 1, 2015.
|
| time_first_after (int): provide results after the defined timestamp for when
| the DNS record was first observed. For example, the URL parameter
| “time_first_after=-31536000” will only provide results that were first
| observed within the last year.
|
| time_last_before (int): provide results before the defined timestamp for
| when the DNS record was last observed. For example, the URL parameter
| “time_last_before=1356998400” will only provide results for DNS records
| that were last observed before 2013.
|
| time_last_after (int): provide results after the defined timestamp for when
| the DNS record was last observed. For example, the URL parameter
| “time_last_after=-2678400” will only provide results that were last
| observed after 31 days ago.
|
| limit (int): Limit for the number of results returned via these lookup
| methods. There is a built-in limit to the number of results that are
| returned via these lookup methods. The default limit is set at 10,000.
| This limit can be raised or lowered by setting the “limit” query
| parameter.
|
| There is also a maximum number of results allowed; requesting a limit
| greater than the maximum will only return the maximum. See results_max
| below for information on that maximum. If “?limit=0” is used then DNSDB
| will return the maximum number of results allowed. Obviously, if there
|             are fewer results for the query than the requested limit, only the actual
| amount can be returned.
|
| id (str): Client software specific identity of the user of the API client.
| Comprised of an alphanumeric string, a colon, and an alphanumeric
| string, limited to thirty characters. This may be logged by the DNSDB
| API server.
|
| aggr (bool): Aggregated results group identical rrsets across all time
| periods and is the classic behavior from querying the DNSDB. This means
| you could get the total number of times an rrset has been observed, but
| not when it was observed. Unaggregated results ungroup identical rrsets,
| allowing you to see how the domain name was resolved in the DNS across
| the full-time range covered in DNSDB (subject to time fencing). This can
| give a more accurate impression of record request volume across time
| because it will reveal the distinct timestamps of records whose values
| are repeated. You can answer questions like, “Was a domain parked for a
| long time, mostly unused, until it was repurposed for serving malware or
| relaying spam, but then was abandoned again?” It allows you to see if a
| record was observed heavily in the last week vs. having been observed
| constantly for years.
|
| humantime (bool): A value that is True if time values (in time_first,
| time_last, zone_time_first, zone_time_last) should be returned in human
| readable (RFC3339 compliant) format or False if Unix-style time values
| in seconds since the epoch should be returned. False is the classic
| behavior from querying the DNSDB and is the default value for this
| option.
|
|         ignore_limited (bool): Suppress QueryLimited exceptions.
|
| max_count (int): max_count controls stopping when we reach that summary
| count. The resulting total count can exceed max_count as it will include
| the entire count from the last rrset examined.
|
| The default is to not constrain the count.
```
<a name="dnsdb2.Client.lookup_rdata_ip"></a>
#### lookup\_rdata\_ip
```
| lookup_rdata_ip = f(self, ip: str, ignore_limited: bool = False, **params)
|     Executes a lookup rdata ip query.
|
| Args:
| ip (str): One of an IPv4 or IPv6 single address, with a prefix length, or
| with an address range.
| time_first_before (int): provide results before the defined timestamp for
| when the DNS record was first observed. For example, the URL parameter
| “time_first_before=1420070400” will only provide matching DNS records
| that were first observed before (or older than) January 1, 2015.
|
| time_first_after (int): provide results after the defined timestamp for when
| the DNS record was first observed. For example, the URL parameter
| “time_first_after=-31536000” will only provide results that were first
| observed within the last year.
|
| time_last_before (int): provide results before the defined timestamp for
| when the DNS record was last observed. For example, the URL parameter
| “time_last_before=1356998400” will only provide results for DNS records
| that were last observed before 2013.
|
| time_last_after (int): provide results after the defined timestamp for when
| the DNS record was last observed. For example, the URL parameter
| “time_last_after=-2678400” will only provide results that were last
| observed after 31 days ago.
|
| limit (int): Limit for the number of results returned via these lookup
| methods. There is a built-in limit to the number of results that are
| returned via these lookup methods. The default limit is set at 10,000.
| This limit can be raised or lowered by setting the “limit” query
| parameter.
|
| There is also a maximum number of results allowed; requesting a limit
| greater than the maximum will only return the maximum. See results_max
| below for information on that maximum. If “?limit=0” is used then DNSDB
| will return the maximum number of results allowed. Obviously, if there
|             are fewer results for the query than the requested limit, only the actual
| amount can be returned.
|
| id (str): Client software specific identity of the user of the API client.
| Comprised of an alphanumeric string, a colon, and an alphanumeric
| string, limited to thirty characters. This may be logged by the DNSDB
| API server.
|
| aggr (bool): Aggregated results group identical rrsets across all time
| periods and is the classic behavior from querying the DNSDB. This means
| you could get the total number of times an rrset has been observed, but
| not when it was observed. Unaggregated results ungroup identical rrsets,
| allowing you to see how the domain name was resolved in the DNS across
| the full-time range covered in DNSDB (subject to time fencing). This can
| give a more accurate impression of record request volume across time
| because it will reveal the distinct timestamps of records whose values
| are repeated. You can answer questions like, “Was a domain parked for a
| long time, mostly unused, until it was repurposed for serving malware or
| relaying spam, but then was abandoned again?” It allows you to see if a
| record was observed heavily in the last week vs. having been observed
| constantly for years.
|
| humantime (bool): A value that is True if time values (in time_first,
| time_last, zone_time_first, zone_time_last) should be returned in human
| readable (RFC3339 compliant) format or False if Unix-style time values
| in seconds since the epoch should be returned. False is the classic
| behavior from querying the DNSDB and is the default value for this
| option.
|
|         ignore_limited (bool): Suppress QueryLimited exceptions.
|
| offset (int): How many rows to offset (e.g. skip) in the results.
| This implements an incremental result transfer feature, allowing you to
| view more of the available results for a single query. The rows are
| offset prior to the limit parameter being applied, therefore offset
| allows seeing additional results past a limit that matches the maximum
| number of results. Note that DNSDB recalculates the results for each
| query and the order of results might not be preserved. Therefore, this
| capability is not a valid way to walk all results over multiple queries
| – some results might be missing and some might be duplicated. The actual
| offset that can be used is limited or for certain API keys, offset is
| not allowed – see the offset_max rate_limit key below.
```
<a name="dnsdb2.Client.summarize_rdata_ip"></a>
#### summarize\_rdata\_ip
```
| summarize_rdata_ip = f(self, ip: str, ignore_limited: bool = False, **params)
|     Executes a summarize rdata ip query.
|
| Args:
| ip (str): One of an IPv4 or IPv6 single address, with a prefix length, or
| with an address range.
| time_first_before (int): provide results before the defined timestamp for
| when the DNS record was first observed. For example, the URL parameter
| “time_first_before=1420070400” will only provide matching DNS records
| that were first observed before (or older than) January 1, 2015.
|
| time_first_after (int): provide results after the defined timestamp for when
| the DNS record was first observed. For example, the URL parameter
| “time_first_after=-31536000” will only provide results that were first
| observed within the last year.
|
| time_last_before (int): provide results before the defined timestamp for
| when the DNS record was last observed. For example, the URL parameter
| “time_last_before=1356998400” will only provide results for DNS records
| that were last observed before 2013.
|
| time_last_after (int): provide results after the defined timestamp for when
| the DNS record was last observed. For example, the URL parameter
| “time_last_after=-2678400” will only provide results that were last
| observed after 31 days ago.
|
| limit (int): Limit for the number of results returned via these lookup
| methods. There is a built-in limit to the number of results that are
| returned via these lookup methods. The default limit is set at 10,000.
| This limit can be raised or lowered by setting the “limit” query
| parameter.
|
| There is also a maximum number of results allowed; requesting a limit
| greater than the maximum will only return the maximum. See results_max
| below for information on that maximum. If “?limit=0” is used then DNSDB
| will return the maximum number of results allowed. Obviously, if there
|             are fewer results for the query than the requested limit, only the actual
| amount can be returned.
|
| id (str): Client software specific identity of the user of the API client.
| Comprised of an alphanumeric string, a colon, and an alphanumeric
| string, limited to thirty characters. This may be logged by the DNSDB
| API server.
|
| aggr (bool): Aggregated results group identical rrsets across all time
| periods and is the classic behavior from querying the DNSDB. This means
| you could get the total number of times an rrset has been observed, but
| not when it was observed. Unaggregated results ungroup identical rrsets,
| allowing you to see how the domain name was resolved in the DNS across
| the full-time range covered in DNSDB (subject to time fencing). This can
| give a more accurate impression of record request volume across time
| because it will reveal the distinct timestamps of records whose values
| are repeated. You can answer questions like, “Was a domain parked for a
| long time, mostly unused, until it was repurposed for serving malware or
| relaying spam, but then was abandoned again?” It allows you to see if a
| record was observed heavily in the last week vs. having been observed
| constantly for years.
|
| humantime (bool): A value that is True if time values (in time_first,
| time_last, zone_time_first, zone_time_last) should be returned in human
| readable (RFC3339 compliant) format or False if Unix-style time values
| in seconds since the epoch should be returned. False is the classic
| behavior from querying the DNSDB and is the default value for this
| option.
|
|         ignore_limited (bool): Suppress QueryLimited exceptions.
|
| max_count (int): max_count controls stopping when we reach that summary
| count. The resulting total count can exceed max_count as it will include
| the entire count from the last rrset examined.
|
| The default is to not constrain the count.
```
<a name="dnsdb2.Client.lookup_rdata_raw"></a>
#### lookup\_rdata\_raw
```
| lookup_rdata_raw = f(self, raw_rdata: str, rrtype: str = None, ignore_limited: bool = False, **params)
|     Executes a lookup rdata raw query.
|
| Args:
| raw_rdata (str): An even number of hexadecimal digits specifying a raw
| octet string.
| rrtype (str): a DNS RRtype mnemonic.
|
| time_first_before (int): provide results before the defined timestamp for
| when the DNS record was first observed. For example, the URL parameter
| “time_first_before=1420070400” will only provide matching DNS records
| that were first observed before (or older than) January 1, 2015.
|
| time_first_after (int): provide results after the defined timestamp for when
| the DNS record was first observed. For example, the URL parameter
| “time_first_after=-31536000” will only provide results that were first
| observed within the last year.
|
| time_last_before (int): provide results before the defined timestamp for
| when the DNS record was last observed. For example, the URL parameter
| “time_last_before=1356998400” will only provide results for DNS records
| that were last observed before 2013.
|
| time_last_after (int): provide results after the defined timestamp for when
| the DNS record was last observed. For example, the URL parameter
| “time_last_after=-2678400” will only provide results that were last
| observed after 31 days ago.
|
| limit (int): Limit for the number of results returned via these lookup
| methods. There is a built-in limit to the number of results that are
| returned via these lookup methods. The default limit is set at 10,000.
| This limit can be raised or lowered by setting the “limit” query
| parameter.
|
| There is also a maximum number of results allowed; requesting a limit
| greater than the maximum will only return the maximum. See results_max
| below for information on that maximum. If “?limit=0” is used then DNSDB
| will return the maximum number of results allowed. Obviously, if there
|             are fewer results for the query than the requested limit, only the actual
| amount can be returned.
|
| id (str): Client software specific identity of the user of the API client.
| Comprised of an alphanumeric string, a colon, and an alphanumeric
| string, limited to thirty characters. This may be logged by the DNSDB
| API server.
|
| aggr (bool): Aggregated results group identical rrsets across all time
| periods and is the classic behavior from querying the DNSDB. This means
| you could get the total number of times an rrset has been observed, but
| not when it was observed. Unaggregated results ungroup identical rrsets,
| allowing you to see how the domain name was resolved in the DNS across
| the full-time range covered in DNSDB (subject to time fencing). This can
| give a more accurate impression of record request volume across time
| because it will reveal the distinct timestamps of records whose values
| are repeated. You can answer questions like, “Was a domain parked for a
| long time, mostly unused, until it was repurposed for serving malware or
| relaying spam, but then was abandoned again?” It allows you to see if a
| record was observed heavily in the last week vs. having been observed
| constantly for years.
|
| humantime (bool): A value that is True if time values (in time_first,
| time_last, zone_time_first, zone_time_last) should be returned in human
| readable (RFC3339 compliant) format or False if Unix-style time values
| in seconds since the epoch should be returned. False is the classic
| behavior from querying the DNSDB and is the default value for this
| option.
|
|         ignore_limited (bool): Suppress QueryLimited exceptions.
|
| offset (int): How many rows to offset (e.g. skip) in the results.
| This implements an incremental result transfer feature, allowing you to
| view more of the available results for a single query. The rows are
| offset prior to the limit parameter being applied, therefore offset
| allows seeing additional results past a limit that matches the maximum
| number of results. Note that DNSDB recalculates the results for each
| query and the order of results might not be preserved. Therefore, this
| capability is not a valid way to walk all results over multiple queries
| – some results might be missing and some might be duplicated. The actual
| offset that can be used is limited or for certain API keys, offset is
| not allowed – see the offset_max rate_limit key below.
```
<a name="dnsdb2.Client.summarize_rdata_raw"></a>
#### summarize\_rdata\_raw
```
| summarize_rdata_raw = f(self, raw_rdata: str, rrtype: str = None, ignore_limited: bool = False, **params)
|     Executes a summarize rdata raw query.
|
| Args:
| raw_rdata (str): An even number of hexadecimal digits specifying a raw
| octet string.
| rrtype (str): a DNS RRtype mnemonic.
|
| time_first_before (int): provide results before the defined timestamp for
| when the DNS record was first observed. For example, the URL parameter
| “time_first_before=1420070400” will only provide matching DNS records
| that were first observed before (or older than) January 1, 2015.
|
| time_first_after (int): provide results after the defined timestamp for when
| the DNS record was first observed. For example, the URL parameter
| “time_first_after=-31536000” will only provide results that were first
| observed within the last year.
|
| time_last_before (int): provide results before the defined timestamp for
| when the DNS record was last observed. For example, the URL parameter
| “time_last_before=1356998400” will only provide results for DNS records
| that were last observed before 2013.
|
| time_last_after (int): provide results after the defined timestamp for when
| the DNS record was last observed. For example, the URL parameter
| “time_last_after=-2678400” will only provide results that were last
| observed after 31 days ago.
|
| limit (int): Limit for the number of results returned via these lookup
| methods. There is a built-in limit to the number of results that are
| returned via these lookup methods. The default limit is set at 10,000.
| This limit can be raised or lowered by setting the “limit” query
| parameter.
|
| There is also a maximum number of results allowed; requesting a limit
| greater than the maximum will only return the maximum. See results_max
| below for information on that maximum. If “?limit=0” is used then DNSDB
| will return the maximum number of results allowed. Obviously, if there
|             are fewer results for the query than the requested limit, only the actual
| amount can be returned.
|
| id (str): Client software specific identity of the user of the API client.
| Comprised of an alphanumeric string, a colon, and an alphanumeric
| string, limited to thirty characters. This may be logged by the DNSDB
| API server.
|
| aggr (bool): Aggregated results group identical rrsets across all time
| periods and is the classic behavior from querying the DNSDB. This means
| you could get the total number of times an rrset has been observed, but
| not when it was observed. Unaggregated results ungroup identical rrsets,
| allowing you to see how the domain name was resolved in the DNS across
| the full-time range covered in DNSDB (subject to time fencing). This can
| give a more accurate impression of record request volume across time
| because it will reveal the distinct timestamps of records whose values
| are repeated. You can answer questions like, “Was a domain parked for a
| long time, mostly unused, until it was repurposed for serving malware or
| relaying spam, but then was abandoned again?” It allows you to see if a
| record was observed heavily in the last week vs. having been observed
| constantly for years.
|
| humantime (bool): A value that is True if time values (in time_first,
| time_last, zone_time_first, zone_time_last) should be returned in human
| readable (RFC3339 compliant) format or False if Unix-style time values
| in seconds since the epoch should be returned. False is the classic
| behavior from querying the DNSDB and is the default value for this
| option.
|
|         ignore_limited (bool): Suppress QueryLimited exceptions.
|
| max_count (int): max_count controls stopping when we reach that summary
| count. The resulting total count can exceed max_count as it will include
| the entire count from the last rrset examined.
|
| The default is to not constrain the count.
```
<a name="dnsdb2.Client.flex_rrnames_regex"></a>
#### flex\_rrnames\_regex
```
| flex_rrnames_regex = f(self, value: str, rrtype: str = None, verbose: bool = True, ignore_limited: bool = False, **params)
| Executes a regex rrnames flex search query.
|
| Args:
| value (str): A regex to match against rrnames.
| rrtype (str): a DNS RRtype mnemonic.
|
| verbose (bool): Set to false to disable `count`, `time_first`, and
| `time_last` fields in output.
| time_first_before (int): provide results before the defined timestamp for
| when the DNS record was first observed. For example, the URL parameter
| “time_first_before=1420070400” will only provide matching DNS records
| that were first observed before (or older than) January 1, 2015.
|
| time_first_after (int): provide results after the defined timestamp for when
| the DNS record was first observed. For example, the URL parameter
| “time_first_after=-31536000” will only provide results that were first
| observed within the last year.
|
| time_last_before (int): provide results before the defined timestamp for
| when the DNS record was last observed. For example, the URL parameter
| “time_last_before=1356998400” will only provide results for DNS records
| that were last observed before 2013.
|
| time_last_after (int): provide results after the defined timestamp for when
| the DNS record was last observed. For example, the URL parameter
| “time_last_after=-2678400” will only provide results that were last
| observed after 31 days ago.
|
| exclude (str): Exclude (i.e. filter-out) results that match the regex.
| limit (int): Limit for the number of results returned via these lookup
| methods. There is a built-in limit to the number of results that are
| returned via these lookup methods. The default limit is set at 10,000.
| This limit can be raised or lowered by setting the “limit” query
| parameter.
|
| There is also a maximum number of results allowed; requesting a limit
| greater than the maximum will only return the maximum. See results_max
| below for information on that maximum. If “?limit=0” is used then DNSDB
| will return the maximum number of results allowed. Obviously, if there
|             are fewer results for the query than the requested limit, only the actual
| amount can be returned.
|
| id (str): Client software specific identity of the user of the API client.
| Comprised of an alphanumeric string, a colon, and an alphanumeric
| string, limited to thirty characters. This may be logged by the DNSDB
| API server.
|
| offset (int): How many rows to offset (e.g. skip) in the results.
| This implements an incremental result transfer feature, allowing you to
| view more of the available results for a single query. The rows are
| offset prior to the limit parameter being applied, therefore offset
| allows seeing additional results past a limit that matches the maximum
| number of results. Note that DNSDB recalculates the results for each
| query and the order of results might not be preserved. Therefore, this
| capability is not a valid way to walk all results over multiple queries
| – some results might be missing and some might be duplicated. The actual
| offset that can be used is limited or for certain API keys, offset is
| not allowed – see the offset_max rate_limit key below.
|
|         ignore_limited (bool): Suppress QueryLimited exceptions.
```
<a name="dnsdb2.Client.flex_rrnames_glob"></a>
#### flex\_rrnames\_glob
```
| flex_rrnames_glob = f(self, value: str, rrtype: str = None, verbose: bool = True, ignore_limited: bool = False, **params)
| Executes a glob rrnames flex search query.
|
| Args:
| value (str): A glob to match against rrnames.
| rrtype (str): a DNS RRtype mnemonic.
|
| verbose (bool): Set to false to disable `count`, `time_first`, and
| `time_last` fields in output.
| time_first_before (int): provide results before the defined timestamp for
| when the DNS record was first observed. For example, the URL parameter
| “time_first_before=1420070400” will only provide matching DNS records
| that were first observed before (or older than) January 1, 2015.
|
| time_first_after (int): provide results after the defined timestamp for when
| the DNS record was first observed. For example, the URL parameter
| “time_first_after=-31536000” will only provide results that were first
| observed within the last year.
|
| time_last_before (int): provide results before the defined timestamp for
| when the DNS record was last observed. For example, the URL parameter
| “time_last_before=1356998400” will only provide results for DNS records
| that were last observed before 2013.
|
| time_last_after (int): provide results after the defined timestamp for when
| the DNS record was last observed. For example, the URL parameter
| “time_last_after=-2678400” will only provide results that were last
| observed after 31 days ago.
|
| exclude (str): Exclude (i.e. filter-out) results that match the glob.
| limit (int): Limit for the number of results returned via these lookup
| methods. There is a built-in limit to the number of results that are
| returned via these lookup methods. The default limit is set at 10,000.
| This limit can be raised or lowered by setting the “limit” query
| parameter.
|
| There is also a maximum number of results allowed; requesting a limit
| greater than the maximum will only return the maximum. See results_max
| below for information on that maximum. If “?limit=0” is used then DNSDB
| will return the maximum number of results allowed. Obviously, if there
| are less results for the query than the requested limit, only the actual
| amount can be returned.
|
| id (str): Client software specific identity of the user of the API client.
| Comprised of an alphanumeric string, a colon, and an alphanumeric
| string, limited to thirty characters. This may be logged by the DNSDB
| API server.
|
| offset (int): How many rows to offset (e.g. skip) in the results.
| This implements an incremental result transfer feature, allowing you to
| view more of the available results for a single query. The rows are
| offset prior to the limit parameter being applied, therefore offset
| allows seeing additional results past a limit that matches the maximum
| number of results. Note that DNSDB recalculates the results for each
| query and the order of results might not be preserved. Therefore, this
| capability is not a valid way to walk all results over multiple queries
| – some results might be missing and some might be duplicated. The actual
| offset that can be used is limited or for certain API keys, offset is
| not allowed – see the offset_max rate_limit key below.
|
| ignore_limited (bool): Suppress QueryLimited exceptions.
```
<a name="dnsdb2.Client.flex_rdata_regex"></a>
#### flex\_rdata\_regex
```
| flex_rdata_regex = f(self, value: str, rrtype: str = None, verbose: bool = True, ignore_limited: bool = False, **params)
| Executes a regex rdata flex search query.
|
| Args:
| value (str): A regex to match against rdata.
| rrtype (str): a DNS RRtype mnemonic.
|
| verbose (bool): Set to false to disable `count`, `time_first`, and
| `time_last` fields in output.
| time_first_before (int): provide results before the defined timestamp for
| when the DNS record was first observed. For example, the URL parameter
| “time_first_before=1420070400” will only provide matching DNS records
| that were first observed before (or older than) January 1, 2015.
|
| time_first_after (int): provide results after the defined timestamp for when
| the DNS record was first observed. For example, the URL parameter
| “time_first_after=-31536000” will only provide results that were first
| observed within the last year.
|
| time_last_before (int): provide results before the defined timestamp for
| when the DNS record was last observed. For example, the URL parameter
| “time_last_before=1356998400” will only provide results for DNS records
| that were last observed before 2013.
|
| time_last_after (int): provide results after the defined timestamp for when
| the DNS record was last observed. For example, the URL parameter
| “time_last_after=-2678400” will only provide results that were last
| observed after 31 days ago.
|
| exclude (str): Exclude (i.e. filter-out) results that match the regex.
| limit (int): Limit for the number of results returned via these lookup
| methods. There is a built-in limit to the number of results that are
| returned via these lookup methods. The default limit is set at 10,000.
| This limit can be raised or lowered by setting the “limit” query
| parameter.
|
| There is also a maximum number of results allowed; requesting a limit
| greater than the maximum will only return the maximum. See results_max
| below for information on that maximum. If “?limit=0” is used then DNSDB
| will return the maximum number of results allowed. Obviously, if there
| are less results for the query than the requested limit, only the actual
| amount can be returned.
|
| id (str): Client software specific identity of the user of the API client.
| Comprised of an alphanumeric string, a colon, and an alphanumeric
| string, limited to thirty characters. This may be logged by the DNSDB
| API server.
|
| offset (int): How many rows to offset (e.g. skip) in the results.
| This implements an incremental result transfer feature, allowing you to
| view more of the available results for a single query. The rows are
| offset prior to the limit parameter being applied, therefore offset
| allows seeing additional results past a limit that matches the maximum
| number of results. Note that DNSDB recalculates the results for each
| query and the order of results might not be preserved. Therefore, this
| capability is not a valid way to walk all results over multiple queries
| – some results might be missing and some might be duplicated. The actual
| offset that can be used is limited or for certain API keys, offset is
| not allowed – see the offset_max rate_limit key below.
|
| ignore_limited (bool): Suppress QueryLimited exceptions.
```
<a name="dnsdb2.Client.flex_rdata_glob"></a>
#### flex\_rdata\_glob
```
| flex_rdata_glob = f(self, value: str, rrtype: str = None, verbose: bool = True, ignore_limited: bool = False, **params)
| Executes a glob rdata flex search query.
|
| Args:
| value (str): A glob to match against rdata.
| rrtype (str): a DNS RRtype mnemonic.
|
| verbose (bool): Set to false to disable `count`, `time_first`, and
| `time_last` fields in output.
| time_first_before (int): provide results before the defined timestamp for
| when the DNS record was first observed. For example, the URL parameter
| “time_first_before=1420070400” will only provide matching DNS records
| that were first observed before (or older than) January 1, 2015.
|
| time_first_after (int): provide results after the defined timestamp for when
| the DNS record was first observed. For example, the URL parameter
| “time_first_after=-31536000” will only provide results that were first
| observed within the last year.
|
| time_last_before (int): provide results before the defined timestamp for
| when the DNS record was last observed. For example, the URL parameter
| “time_last_before=1356998400” will only provide results for DNS records
| that were last observed before 2013.
|
| time_last_after (int): provide results after the defined timestamp for when
| the DNS record was last observed. For example, the URL parameter
| “time_last_after=-2678400” will only provide results that were last
| observed after 31 days ago.
|
| exclude (str): Exclude (i.e. filter-out) results that match the glob.
| limit (int): Limit for the number of results returned via these lookup
| methods. There is a built-in limit to the number of results that are
| returned via these lookup methods. The default limit is set at 10,000.
| This limit can be raised or lowered by setting the “limit” query
| parameter.
|
| There is also a maximum number of results allowed; requesting a limit
| greater than the maximum will only return the maximum. See results_max
| below for information on that maximum. If “?limit=0” is used then DNSDB
| will return the maximum number of results allowed. Obviously, if there
| are less results for the query than the requested limit, only the actual
| amount can be returned.
|
| id (str): Client software specific identity of the user of the API client.
| Comprised of an alphanumeric string, a colon, and an alphanumeric
| string, limited to thirty characters. This may be logged by the DNSDB
| API server.
|
| offset (int): How many rows to offset (e.g. skip) in the results.
| This implements an incremental result transfer feature, allowing you to
| view more of the available results for a single query. The rows are
| offset prior to the limit parameter being applied, therefore offset
| allows seeing additional results past a limit that matches the maximum
| number of results. Note that DNSDB recalculates the results for each
| query and the order of results might not be preserved. Therefore, this
| capability is not a valid way to walk all results over multiple queries
| – some results might be missing and some might be duplicated. The actual
| offset that can be used is limited or for certain API keys, offset is
| not allowed – see the offset_max rate_limit key below.
|
| ignore_limited (bool): Suppress QueryLimited exceptions.
```
<a name="dnsdb2.DnsdbException"></a>
### DnsdbException Objects
```python
class DnsdbException(Exception)
```
Common base class for all DNSDB exceptions.
<a name="dnsdb2.AccessDenied"></a>
### AccessDenied Objects
```python
class AccessDenied(DnsdbException)
```
Exception raised if the API key is not authorized (usually indicates the
block quota is expired), or the provided API key is not valid, or the
Client IP address not authorized for this API key.
<a name="dnsdb2.OffsetError"></a>
### OffsetError Objects
```python
class OffsetError(DnsdbException)
```
Exception raised if the offset value is greater than the maximum allowed
or if an offset value was provided when not permitted.
<a name="dnsdb2.QuotaExceeded"></a>
### QuotaExceeded Objects
```python
class QuotaExceeded(DnsdbException)
```
Exception raised if you have exceeded your quota and no new requests will
be accepted at this time.
For time-based quotas : The API key’s daily quota limit is exceeded. The
quota will automatically replenish, usually at the start of the next day.
For block-based quotas : The block quota is exhausted. You may need to
purchase a larger quota.
For burst rate secondary quotas : There were too many queries within the
burst window. The window will automatically reopen at its end.
<a name="dnsdb2.ConcurrencyExceeded"></a>
### ConcurrencyExceeded Objects
```python
class ConcurrencyExceeded(DnsdbException)
```
Exception raised if the limit of number of concurrent connections is exceeded.
<a name="dnsdb2.QueryError"></a>
### QueryError Objects
```python
class QueryError(DnsdbException)
```
Exception raised if a communication error occurs while executing a query, or
the server reports an error due to invalid arguments.
<a name="dnsdb2.QueryFailed"></a>
### QueryFailed Objects
```python
class QueryFailed(DnsdbException)
```
Exception raised if an error is reported by the server while a query is running.
<a name="dnsdb2.QueryLimited"></a>
### QueryLimited Objects
```python
class QueryLimited(DnsdbException)
```
Exception raised if the result limit is reached.
<a name="dnsdb2.QueryTruncated"></a>
### QueryTruncated Objects
```python
class QueryTruncated(DnsdbException)
```
Exception raised if query results are incomplete due to a server error.
<a name="dnsdb2.ProtocolError"></a>
### ProtocolError Objects
```python
class ProtocolError(DnsdbException)
```
Exception raised if invalid data is received via the Streaming
Application Framework.
|
PypiClean
|
/mis_modulos-0.1.tar.gz/mis_modulos-0.1/setuptools/_distutils/dist.py
|
import sys
import os
import re
import pathlib
import contextlib
from email import message_from_file
try:
import warnings
except ImportError:
warnings = None
from distutils.errors import (
DistutilsOptionError,
DistutilsModuleError,
DistutilsArgError,
DistutilsClassError,
)
from distutils.fancy_getopt import FancyGetopt, translate_longopt
from distutils.util import check_environ, strtobool, rfc822_escape
from distutils import log
from distutils.debug import DEBUG
# Regex to define acceptable Distutils command names.  This is not *quite*
# the same as a Python NAME -- I don't allow leading underscores.  The fact
# that they're very similar is no coincidence; the default naming scheme is
# to look for a Python module named after the command.
# Matches e.g. "build_ext" or "sdist"; rejects "_private" and "2to3".
command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
def _ensure_list(value, fieldname):
if isinstance(value, str):
# a string containing comma separated values is okay. It will
# be converted to a list by Distribution.finalize_options().
pass
elif not isinstance(value, list):
# passing a tuple or an iterator perhaps, warn and convert
typename = type(value).__name__
msg = "Warning: '{fieldname}' should be a list, got type '{typename}'"
msg = msg.format(**locals())
log.log(log.WARN, msg)
value = list(value)
return value
class Distribution:
"""The core of the Distutils. Most of the work hiding behind 'setup'
is really done within a Distribution instance, which farms the work out
to the Distutils commands specified on the command line.
Setup scripts will almost never instantiate Distribution directly,
unless the 'setup()' function is totally inadequate to their needs.
However, it is conceivable that a setup script might wish to subclass
Distribution for some specialized purpose, and then pass the subclass
to 'setup()' as the 'distclass' keyword argument. If so, it is
necessary to respect the expectations that 'setup' has of Distribution.
See the code for 'setup()', in core.py, for details.
"""
# 'global_options' describes the command-line options that may be
# supplied to the setup script prior to any actual commands.
# Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of
# these global options. This list should be kept to a bare minimum,
# since every global option is also valid as a command option -- and we
# don't want to pollute the commands with too many options that they
# have minimal control over.
# The fourth entry for verbose means that it can be repeated.
global_options = [
('verbose', 'v', "run verbosely (default)", 1),
('quiet', 'q', "run quietly (turns verbosity off)"),
('dry-run', 'n', "don't actually do anything"),
('help', 'h', "show detailed help message"),
('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'),
]
# 'common_usage' is a short (2-3 line) string describing the common
# usage of the setup script.
common_usage = """\
Common commands: (see '--help-commands' for more)
setup.py build will build the package underneath 'build/'
setup.py install will install the package
"""
# options that are not propagated to the commands
display_options = [
('help-commands', None, "list all available commands"),
('name', None, "print package name"),
('version', 'V', "print package version"),
('fullname', None, "print <package name>-<version>"),
('author', None, "print the author's name"),
('author-email', None, "print the author's email address"),
('maintainer', None, "print the maintainer's name"),
('maintainer-email', None, "print the maintainer's email address"),
('contact', None, "print the maintainer's name if known, else the author's"),
(
'contact-email',
None,
"print the maintainer's email address if known, else the author's",
),
('url', None, "print the URL for this package"),
('license', None, "print the license of the package"),
('licence', None, "alias for --license"),
('description', None, "print the package description"),
('long-description', None, "print the long package description"),
('platforms', None, "print the list of platforms"),
('classifiers', None, "print the list of classifiers"),
('keywords', None, "print the list of keywords"),
('provides', None, "print the list of packages/modules provided"),
('requires', None, "print the list of packages/modules required"),
('obsoletes', None, "print the list of packages/modules made obsolete"),
]
display_option_names = [translate_longopt(x[0]) for x in display_options]
# negative options are options that exclude other options
negative_opt = {'quiet': 'verbose'}
# -- Creation/initialization methods -------------------------------
def __init__(self, attrs=None):  # noqa: C901
    """Construct a new Distribution instance: initialize all the
    attributes of a Distribution, and then use 'attrs' (a dictionary
    mapping attribute names to values) to assign some of those
    attributes their "real" values.  (Any attributes not mentioned in
    'attrs' will be assigned to some null value: 0, None, an empty list
    or dictionary, etc.)  Most importantly, initialize the
    'command_obj' attribute to the empty dictionary; this will be
    filled in with real command objects by 'parse_command_line()'.
    """
    # Default values for our command-line options
    self.verbose = 1
    self.dry_run = 0
    self.help = 0
    for attr in self.display_option_names:
        setattr(self, attr, 0)

    # Store the distribution meta-data (name, version, author, and so
    # forth) in a separate object -- we're getting to have enough
    # information here (and enough command-line options) that it's
    # worth it.  Also delegate 'get_XXX()' methods to the 'metadata'
    # object in a sneaky and underhanded (but efficient!) way.
    self.metadata = DistributionMetadata()
    for basename in self.metadata._METHOD_BASENAMES:
        method_name = "get_" + basename
        setattr(self, method_name, getattr(self.metadata, method_name))

    # 'cmdclass' maps command names to class objects, so we
    # can 1) quickly figure out which class to instantiate when
    # we need to create a new command object, and 2) have a way
    # for the setup script to override command classes
    self.cmdclass = {}

    # 'command_packages' is a list of packages in which commands
    # are searched for.  The factory for command 'foo' is expected
    # to be named 'foo' in the module 'foo' in one of the packages
    # named here.  This list is searched from the left; an error
    # is raised if no named package provides the command being
    # searched for.  (Always access using get_command_packages().)
    self.command_packages = None

    # 'script_name' and 'script_args' are usually set to sys.argv[0]
    # and sys.argv[1:], but they can be overridden when the caller is
    # not necessarily a setup script run from the command-line.
    self.script_name = None
    self.script_args = None

    # 'command_options' is where we store command options between
    # parsing them (from config files, the command-line, etc.) and when
    # they are actually needed -- ie. when the command in question is
    # instantiated.  It is a dictionary of dictionaries of 2-tuples:
    #   command_options = { command_name : { option : (source, value) } }
    self.command_options = {}

    # 'dist_files' is the list of (command, pyversion, file) that
    # have been created by any dist commands run so far. This is
    # filled regardless of whether the run is dry or not. pyversion
    # gives sysconfig.get_python_version() if the dist file is
    # specific to a Python version, 'any' if it is good for all
    # Python versions on the target platform, and '' for a source
    # file. pyversion should not be used to specify minimum or
    # maximum required Python versions; use the metainfo for that
    # instead.
    self.dist_files = []

    # These options are really the business of various commands, rather
    # than of the Distribution itself.  We provide aliases for them in
    # Distribution as a convenience to the developer.
    self.packages = None
    self.package_data = {}
    self.package_dir = None
    self.py_modules = None
    self.libraries = None
    self.headers = None
    self.ext_modules = None
    self.ext_package = None
    self.include_dirs = None
    self.extra_path = None
    self.scripts = None
    self.data_files = None
    self.password = ''

    # And now initialize bookkeeping stuff that can't be supplied by
    # the caller at all.  'command_obj' maps command names to
    # Command instances -- that's how we enforce that every command
    # class is a singleton.
    self.command_obj = {}

    # 'have_run' maps command names to boolean values; it keeps track
    # of whether we have actually run a particular command, to make it
    # cheap to "run" a command whenever we think we might need to -- if
    # it's already been done, no need for expensive filesystem
    # operations, we just check the 'have_run' dictionary and carry on.
    # It's only safe to query 'have_run' for a command class that has
    # been instantiated -- a false value will be inserted when the
    # command object is created, and replaced with a true value when
    # the command is successfully run.  Thus it's probably best to use
    # '.get()' rather than a straight lookup.
    self.have_run = {}

    # Now we'll use the attrs dictionary (ultimately, keyword args from
    # the setup script) to possibly override any or all of these
    # distribution options.
    if attrs:
        # Pull out the set of command options and work on them
        # specifically.  Note that this order guarantees that aliased
        # command options will override any supplied redundantly
        # through the general options dictionary.
        options = attrs.get('options')
        if options is not None:
            del attrs['options']
            for (command, cmd_options) in options.items():
                opt_dict = self.get_option_dict(command)
                for (opt, val) in cmd_options.items():
                    opt_dict[opt] = ("setup script", val)

        if 'licence' in attrs:
            attrs['license'] = attrs['licence']
            del attrs['licence']
            msg = "'licence' distribution option is deprecated; use 'license'"
            if warnings is not None:
                warnings.warn(msg)
            else:
                sys.stderr.write(msg + "\n")

        # Now work on the rest of the attributes.  Any attribute that's
        # not already defined is invalid!
        for (key, val) in attrs.items():
            if hasattr(self.metadata, "set_" + key):
                getattr(self.metadata, "set_" + key)(val)
            elif hasattr(self.metadata, key):
                setattr(self.metadata, key, val)
            elif hasattr(self, key):
                setattr(self, key, val)
            else:
                msg = "Unknown distribution option: %s" % repr(key)
                # BUGFIX: 'warnings' can be None (the module-top import
                # is wrapped in try/except ImportError), so guard it
                # exactly as the 'licence' branch above does, instead of
                # crashing with AttributeError.
                if warnings is not None:
                    warnings.warn(msg)
                else:
                    sys.stderr.write(msg + "\n")

    # no-user-cfg is handled before other command line args
    # because other args override the config files, and this
    # one is needed before we can load the config files.
    # If attrs['script_args'] wasn't passed, assume false.
    #
    # This also make sure we just look at the global options
    self.want_user_cfg = True

    if self.script_args is not None:
        # Scan only the leading option arguments; stop at the first
        # command name (first token not starting with '-').
        for arg in self.script_args:
            if not arg.startswith('-'):
                break
            if arg == '--no-user-cfg':
                self.want_user_cfg = False
                break

    self.finalize_options()
def get_option_dict(self, command):
"""Get the option dictionary for a given command. If that
command's option dictionary hasn't been created yet, then create it
and return the new dictionary; otherwise, return the existing
option dictionary.
"""
dict = self.command_options.get(command)
if dict is None:
dict = self.command_options[command] = {}
return dict
def dump_option_dicts(self, header=None, commands=None, indent=""):
from pprint import pformat
if commands is None: # dump all command option dicts
commands = sorted(self.command_options.keys())
if header is not None:
self.announce(indent + header)
indent = indent + " "
if not commands:
self.announce(indent + "no commands known yet")
return
for cmd_name in commands:
opt_dict = self.command_options.get(cmd_name)
if opt_dict is None:
self.announce(indent + "no option dict for '%s' command" % cmd_name)
else:
self.announce(indent + "option dict for '%s' command:" % cmd_name)
out = pformat(opt_dict)
for line in out.split('\n'):
self.announce(indent + " " + line)
# -- Config file finding/parsing methods ---------------------------
def find_config_files(self):
    """Find as many configuration files as should be processed for this
    platform, and return a list of filenames in the order in which they
    should be parsed.  The filenames returned are guaranteed to exist
    (modulo nasty race conditions).

    There are multiple possible config files:

    - distutils.cfg in the Distutils installation directory (i.e.
      where the top-level Distutils __inst__.py file lives)
    - a file in the user's home directory named .pydistutils.cfg
      on Unix and pydistutils.cfg on Windows/Mac; may be disabled
      with the ``--no-user-cfg`` option
    - setup.cfg in the current directory
    - a file named by an environment variable
    """
    check_environ()
    # keep candidate order; drop candidates that don't exist on disk
    files = [str(candidate) for candidate in filter(os.path.isfile, self._gen_paths())]
    if DEBUG:
        self.announce("using config files: %s" % ', '.join(files))
    return files
def _gen_paths(self):
# The system-wide Distutils config file
sys_dir = pathlib.Path(sys.modules['distutils'].__file__).parent
yield sys_dir / "distutils.cfg"
# The per-user config file
prefix = '.' * (os.name == 'posix')
filename = prefix + 'pydistutils.cfg'
if self.want_user_cfg:
yield pathlib.Path('~').expanduser() / filename
# All platforms support local setup.cfg
yield pathlib.Path('setup.cfg')
# Additional config indicated in the environment
with contextlib.suppress(TypeError):
yield pathlib.Path(os.getenv("DIST_EXTRA_CONFIG"))
def parse_config_files(self, filenames=None):  # noqa: C901
    """Parse each config file in turn and stash the options found into
    self.command_options (via get_option_dict), tagged with the filename
    they came from.  A trailing "global" section, if present, sets
    attributes on the Distribution itself.
    """
    from configparser import ConfigParser

    # Ignore install directory options if we have a venv
    # (sys.prefix != sys.base_prefix is the standard venv detection).
    if sys.prefix != sys.base_prefix:
        ignore_options = [
            'install-base',
            'install-platbase',
            'install-lib',
            'install-platlib',
            'install-purelib',
            'install-headers',
            'install-scripts',
            'install-data',
            'prefix',
            'exec-prefix',
            'home',
            'user',
            'root',
        ]
    else:
        ignore_options = []

    ignore_options = frozenset(ignore_options)

    if filenames is None:
        filenames = self.find_config_files()

    if DEBUG:
        self.announce("Distribution.parse_config_files():")

    parser = ConfigParser()
    for filename in filenames:
        if DEBUG:
            self.announce(" reading %s" % filename)
        parser.read(filename)
        for section in parser.sections():
            options = parser.options(section)
            opt_dict = self.get_option_dict(section)

            for opt in options:
                # '__name__' is ConfigParser bookkeeping, not an option
                if opt != '__name__' and opt not in ignore_options:
                    val = parser.get(section, opt)
                    # config files use dashes; attributes use underscores
                    opt = opt.replace('-', '_')
                    opt_dict[opt] = (filename, val)

        # Make the ConfigParser forget everything (so we retain
        # the original filenames that options come from)
        parser.__init__()

    # If there was a "global" section in the config file, use it
    # to set Distribution options.
    if 'global' in self.command_options:
        for (opt, (src, val)) in self.command_options['global'].items():
            alias = self.negative_opt.get(opt)
            try:
                if alias:
                    # negative option: invert the boolean for its alias
                    setattr(self, alias, not strtobool(val))
                elif opt in ('verbose', 'dry_run'):  # ugh!
                    setattr(self, opt, strtobool(val))
                else:
                    setattr(self, opt, val)
            except ValueError as msg:
                # strtobool failed on a non-boolean string
                raise DistutilsOptionError(msg)
# -- Command-line parsing methods ----------------------------------
def parse_command_line(self):
    """Parse the setup script's command line, taken from the
    'script_args' instance attribute (which defaults to 'sys.argv[1:]'
    -- see 'setup()' in core.py).  This list is first processed for
    "global options" -- options that set attributes of the Distribution
    instance.  Then, it is alternately scanned for Distutils commands
    and options for that command.  Each new command terminates the
    options for the previous command.  The allowed options for a
    command are determined by the 'user_options' attribute of the
    command class -- thus, we have to be able to load command classes
    in order to parse the command line.  Any error in that 'options'
    attribute raises DistutilsGetoptError; any error on the
    command-line raises DistutilsArgError.  If no Distutils commands
    were found on the command line, raises DistutilsArgError.  Return
    true if command-line was successfully parsed and we should carry
    on with executing commands; false if no errors but we shouldn't
    execute commands (currently, this only happens if user asks for
    help).
    """
    #
    # We now have enough information to show the Macintosh dialog
    # that allows the user to interactively specify the "command line".
    #
    toplevel_options = self._get_toplevel_options()

    # We have to parse the command line a bit at a time -- global
    # options, then the first command, then its options, and so on --
    # because each command will be handled by a different class, and
    # the options that are valid for a particular class aren't known
    # until we have loaded the command class, which doesn't happen
    # until we know what the command is.

    self.commands = []
    parser = FancyGetopt(toplevel_options + self.display_options)
    parser.set_negative_aliases(self.negative_opt)
    # accept the historical British spelling on the command line too
    parser.set_aliases({'licence': 'license'})
    # object=self makes getopt store global option values as attributes
    args = parser.getopt(args=self.script_args, object=self)
    option_order = parser.get_option_order()
    log.set_verbosity(self.verbose)

    # for display options we return immediately
    if self.handle_display_options(option_order):
        return
    # consume "command [options...]" groups until the line is exhausted
    while args:
        args = self._parse_command_opts(parser, args)
        if args is None:  # user asked for help (and got it)
            return

    # Handle the cases of --help as a "global" option, ie.
    # "setup.py --help" and "setup.py --help command ...".  For the
    # former, we show global options (--verbose, --dry-run, etc.)
    # and display-only options (--name, --version, etc.); for the
    # latter, we omit the display-only options and show help for
    # each command listed on the command line.
    if self.help:
        self._show_help(
            parser, display_options=len(self.commands) == 0, commands=self.commands
        )
        return

    # Oops, no commands found -- an end-user error
    if not self.commands:
        raise DistutilsArgError("no commands supplied")

    # All is well: return true
    return True
def _get_toplevel_options(self):
"""Return the non-display options recognized at the top level.
This includes options that are recognized *only* at the top
level as well as options recognized for commands.
"""
return self.global_options + [
(
"command-packages=",
None,
"list of packages that provide distutils commands",
),
]
def _parse_command_opts(self, parser, args):  # noqa: C901
    """Parse the command-line options for a single command.
    'parser' must be a FancyGetopt instance; 'args' must be the list
    of arguments, starting with the current command (whose options
    we are about to parse).  Returns a new version of 'args' with
    the next command at the front of the list; will be the empty
    list if there are no more commands on the command line.  Returns
    None if the user asked for help on this command.
    """
    # late import because of mutual dependence between these modules
    from distutils.cmd import Command

    # Pull the current command from the head of the command line
    command = args[0]
    if not command_re.match(command):
        raise SystemExit("invalid command name '%s'" % command)
    self.commands.append(command)

    # Dig up the command class that implements this command, so we
    # 1) know that it's a valid command, and 2) know which options
    # it takes.
    try:
        cmd_class = self.get_command_class(command)
    except DistutilsModuleError as msg:
        raise DistutilsArgError(msg)

    # Require that the command class be derived from Command -- want
    # to be sure that the basic "command" interface is implemented.
    if not issubclass(cmd_class, Command):
        raise DistutilsClassError(
            "command class %s must subclass Command" % cmd_class
        )

    # Also make sure that the command object provides a list of its
    # known options.
    if not (
        hasattr(cmd_class, 'user_options')
        and isinstance(cmd_class.user_options, list)
    ):
        msg = (
            "command class %s must provide "
            "'user_options' attribute (a list of tuples)"
        )
        raise DistutilsClassError(msg % cmd_class)

    # If the command class has a list of negative alias options,
    # merge it in with the global negative aliases.
    # (copy() so the class-level dict is never mutated)
    negative_opt = self.negative_opt
    if hasattr(cmd_class, 'negative_opt'):
        negative_opt = negative_opt.copy()
        negative_opt.update(cmd_class.negative_opt)

    # Check for help_options in command class.  They have a different
    # format (tuple of four) so we need to preprocess them here.
    if hasattr(cmd_class, 'help_options') and isinstance(
        cmd_class.help_options, list
    ):
        help_options = fix_help_options(cmd_class.help_options)
    else:
        help_options = []

    # All commands support the global options too, just by adding
    # in 'global_options'.
    parser.set_option_table(
        self.global_options + cmd_class.user_options + help_options
    )
    parser.set_negative_aliases(negative_opt)
    (args, opts) = parser.getopt(args[1:])
    if hasattr(opts, 'help') and opts.help:
        self._show_help(parser, display_options=0, commands=[cmd_class])
        return

    if hasattr(cmd_class, 'help_options') and isinstance(
        cmd_class.help_options, list
    ):
        help_option_found = 0
        for (help_option, short, desc, func) in cmd_class.help_options:
            if hasattr(opts, parser.get_attr_name(help_option)):
                help_option_found = 1
                if callable(func):
                    func()
                else:
                    raise DistutilsClassError(
                        "invalid help function %r for help option '%s': "
                        "must be a callable object (function, etc.)"
                        % (func, help_option)
                    )

        # a help option short-circuits normal processing, like --help
        if help_option_found:
            return

    # Put the options from the command-line into their official
    # holding pen, the 'command_options' dictionary.
    opt_dict = self.get_option_dict(command)
    for (name, value) in vars(opts).items():
        opt_dict[name] = ("command line", value)

    return args
def finalize_options(self):
"""Set final values for all the options on the Distribution
instance, analogous to the .finalize_options() method of Command
objects.
"""
for attr in ('keywords', 'platforms'):
value = getattr(self.metadata, attr)
if value is None:
continue
if isinstance(value, str):
value = [elm.strip() for elm in value.split(',')]
setattr(self.metadata, attr, value)
def _show_help(self, parser, global_options=1, display_options=1, commands=[]):
"""Show help for the setup script command-line in the form of
several lists of command-line options. 'parser' should be a
FancyGetopt instance; do not expect it to be returned in the
same state, as its option table will be reset to make it
generate the correct help text.
If 'global_options' is true, lists the global options:
--verbose, --dry-run, etc. If 'display_options' is true, lists
the "display-only" options: --name, --version, etc. Finally,
lists per-command help for every command name or command class
in 'commands'.
"""
# late import because of mutual dependence between these modules
from distutils.core import gen_usage
from distutils.cmd import Command
if global_options:
if display_options:
options = self._get_toplevel_options()
else:
options = self.global_options
parser.set_option_table(options)
parser.print_help(self.common_usage + "\nGlobal options:")
print('')
if display_options:
parser.set_option_table(self.display_options)
parser.print_help(
"Information display options (just display "
+ "information, ignore any commands)"
)
print('')
for command in self.commands:
if isinstance(command, type) and issubclass(command, Command):
klass = command
else:
klass = self.get_command_class(command)
if hasattr(klass, 'help_options') and isinstance(klass.help_options, list):
parser.set_option_table(
klass.user_options + fix_help_options(klass.help_options)
)
else:
parser.set_option_table(klass.user_options)
parser.print_help("Options for '%s' command:" % klass.__name__)
print('')
print(gen_usage(self.script_name))
    def handle_display_options(self, option_order):
        """If there were any non-global "display-only" options
        (--help-commands or the metadata display options) on the command
        line, display the requested info and return true; else return
        false.
        'option_order' is a list of (option, value) pairs in the order
        the user supplied them, so metadata is printed in that order.
        """
        from distutils.core import gen_usage
        # User just wants a list of commands -- we'll print it out and stop
        # processing now (ie. if they ran "setup --help-commands foo bar",
        # we ignore "foo bar").
        if self.help_commands:
            self.print_commands()
            print('')
            print(gen_usage(self.script_name))
            return 1
        # If user supplied any of the "display metadata" options, then
        # display that metadata in the order in which the user supplied the
        # metadata options.
        any_display_options = 0
        is_display_option = {}
        for option in self.display_options:
            is_display_option[option[0]] = 1
        for (opt, val) in option_order:
            if val and is_display_option.get(opt):
                opt = translate_longopt(opt)
                # Dispatch to the matching metadata accessor, e.g. get_name().
                value = getattr(self.metadata, "get_" + opt)()
                # Comma-join keyword/platform lists, newline-join the
                # multi-valued PEP 314 fields, print scalars verbatim.
                if opt in ['keywords', 'platforms']:
                    print(','.join(value))
                elif opt in ('classifiers', 'provides', 'requires', 'obsoletes'):
                    print('\n'.join(value))
                else:
                    print(value)
                any_display_options = 1
        return any_display_options
def print_command_list(self, commands, header, max_length):
"""Print a subset of the list of all commands -- used by
'print_commands()'.
"""
print(header + ":")
for cmd in commands:
klass = self.cmdclass.get(cmd)
if not klass:
klass = self.get_command_class(cmd)
try:
description = klass.description
except AttributeError:
description = "(no description available)"
print(" %-*s %s" % (max_length, cmd, description))
    def print_commands(self):
        """Print out a help message listing all available commands with a
        description of each.  The list is divided into "standard commands"
        (listed in distutils.command.__all__) and "extra commands"
        (mentioned in self.cmdclass, but not a standard command).  The
        descriptions come from the command class attribute
        'description'.
        """
        import distutils.command
        std_commands = distutils.command.__all__
        # Membership map used to tell standard commands from extras.
        is_std = {}
        for cmd in std_commands:
            is_std[cmd] = 1
        extra_commands = []
        for cmd in self.cmdclass.keys():
            if not is_std.get(cmd):
                extra_commands.append(cmd)
        # Pad every name to the longest one so descriptions line up.
        max_length = 0
        for cmd in std_commands + extra_commands:
            if len(cmd) > max_length:
                max_length = len(cmd)
        self.print_command_list(std_commands, "Standard commands", max_length)
        if extra_commands:
            print()
            self.print_command_list(extra_commands, "Extra commands", max_length)
    def get_command_list(self):
        """Get a list of (command, description) tuples.
        The list is divided into "standard commands" (listed in
        distutils.command.__all__) and "extra commands" (mentioned in
        self.cmdclass, but not a standard command). The descriptions come
        from the command class attribute 'description'.
        """
        # Currently this is only used on Mac OS, for the Mac-only GUI
        # Distutils interface (by Jack Jansen)
        import distutils.command
        std_commands = distutils.command.__all__
        is_std = {}
        for cmd in std_commands:
            is_std[cmd] = 1
        extra_commands = []
        for cmd in self.cmdclass.keys():
            if not is_std.get(cmd):
                extra_commands.append(cmd)
        rv = []
        for cmd in std_commands + extra_commands:
            # Prefer a user-supplied class; otherwise import the standard
            # command module to read its description.
            klass = self.cmdclass.get(cmd)
            if not klass:
                klass = self.get_command_class(cmd)
            try:
                description = klass.description
            except AttributeError:
                description = "(no description available)"
            rv.append((cmd, description))
        return rv
# -- Command class/object methods ----------------------------------
def get_command_packages(self):
"""Return a list of packages from which commands are loaded."""
pkgs = self.command_packages
if not isinstance(pkgs, list):
if pkgs is None:
pkgs = ''
pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != '']
if "distutils.command" not in pkgs:
pkgs.insert(0, "distutils.command")
self.command_packages = pkgs
return pkgs
    def get_command_class(self, command):
        """Return the class that implements the Distutils command named by
        'command'.  First we check the 'cmdclass' dictionary; if the
        command is mentioned there, we fetch the class object from the
        dictionary and return it.  Otherwise we load the command module
        ("distutils.command." + command) and fetch the command class from
        the module.  The loaded class is also stored in 'cmdclass'
        to speed future calls to 'get_command_class()'.
        Raises DistutilsModuleError if the expected module could not be
        found, or if that module does not define the expected class.
        """
        klass = self.cmdclass.get(command)
        if klass:
            return klass
        # Search each configured command package in order; the first
        # module that imports successfully wins.
        for pkgname in self.get_command_packages():
            module_name = "{}.{}".format(pkgname, command)
            # Convention: the class is named exactly like the command.
            klass_name = command
            try:
                __import__(module_name)
                module = sys.modules[module_name]
            except ImportError:
                # Not found in this package -- try the next one.
                continue
            try:
                klass = getattr(module, klass_name)
            except AttributeError:
                raise DistutilsModuleError(
                    "invalid command '%s' (no class '%s' in module '%s')"
                    % (command, klass_name, module_name)
                )
            # Cache so later lookups skip the import machinery.
            self.cmdclass[command] = klass
            return klass
        raise DistutilsModuleError("invalid command '%s'" % command)
    def get_command_obj(self, command, create=1):
        """Return the command object for 'command'.  Normally this object
        is cached on a previous call to 'get_command_obj()'; if no command
        object for 'command' is in the cache, then we either create and
        return it (if 'create' is true) or return None.
        """
        cmd_obj = self.command_obj.get(command)
        if not cmd_obj and create:
            if DEBUG:
                self.announce(
                    "Distribution.get_command_obj(): "
                    "creating '%s' command object" % command
                )
            klass = self.get_command_class(command)
            # Command objects get a back-reference to this distribution.
            cmd_obj = self.command_obj[command] = klass(self)
            self.have_run[command] = 0
            # Set any options that were supplied in config files
            # or on the command line.  (NB. support for error
            # reporting is lame here: any errors aren't reported
            # until 'finalize_options()' is called, which means
            # we won't report the source of the error.)
            options = self.command_options.get(command)
            if options:
                self._set_command_options(cmd_obj, options)
        return cmd_obj
    def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
        """Set the options for 'command_obj' from 'option_dict'.  Basically
        this means copying elements of a dictionary ('option_dict') to
        attributes of an instance ('command').
        'command_obj' must be a Command instance.  If 'option_dict' is not
        supplied, uses the standard option dictionary for this command
        (from 'self.command_options').
        """
        command_name = command_obj.get_command_name()
        if option_dict is None:
            option_dict = self.get_option_dict(command_name)
        if DEBUG:
            self.announce("  setting options for '%s' command:" % command_name)
        # Each entry maps option name -> (source, value), where 'source'
        # says where the value came from (config file, command line, ...).
        for (option, (source, value)) in option_dict.items():
            if DEBUG:
                self.announce("    {} = {} (from {})".format(option, value, source))
            try:
                bool_opts = [translate_longopt(o) for o in command_obj.boolean_options]
            except AttributeError:
                bool_opts = []
            try:
                neg_opt = command_obj.negative_opt
            except AttributeError:
                neg_opt = {}
            try:
                # String values for boolean options (e.g. from config
                # files) are parsed with strtobool; negative aliases set
                # the aliased option to the inverted value.
                is_string = isinstance(value, str)
                if option in neg_opt and is_string:
                    setattr(command_obj, neg_opt[option], not strtobool(value))
                elif option in bool_opts and is_string:
                    setattr(command_obj, option, strtobool(value))
                elif hasattr(command_obj, option):
                    setattr(command_obj, option, value)
                else:
                    raise DistutilsOptionError(
                        "error in %s: command '%s' has no such option '%s'"
                        % (source, command_name, option)
                    )
            except ValueError as msg:
                # Re-raise strtobool parse errors as option errors.
                raise DistutilsOptionError(msg)
    def reinitialize_command(self, command, reinit_subcommands=0):
        """Reinitializes a command to the state it was in when first
        returned by 'get_command_obj()': ie., initialized but not yet
        finalized.  This provides the opportunity to sneak option
        values in programmatically, overriding or supplementing
        user-supplied values from the config files and command line.
        You'll have to re-finalize the command object (by calling
        'finalize_options()' or 'ensure_finalized()') before using it for
        real.
        'command' should be a command name (string) or command object.  If
        'reinit_subcommands' is true, also reinitializes the command's
        sub-commands, as declared by the 'sub_commands' class attribute (if
        it has one).  See the "install" command for an example.  Only
        reinitializes the sub-commands that actually matter, ie. those
        whose test predicates return true.
        Returns the reinitialized command object.
        """
        from distutils.cmd import Command
        if not isinstance(command, Command):
            command_name = command
            command = self.get_command_obj(command_name)
        else:
            command_name = command.get_command_name()
        # Nothing to undo if the command was never finalized.
        if not command.finalized:
            return command
        command.initialize_options()
        command.finalized = 0
        self.have_run[command_name] = 0
        # Re-apply config-file/command-line options on top of the fresh
        # defaults.
        self._set_command_options(command)
        if reinit_subcommands:
            for sub in command.get_sub_commands():
                self.reinitialize_command(sub, reinit_subcommands)
        return command
# -- Methods that operate on the Distribution ----------------------
    def announce(self, msg, level=log.INFO):
        """Log 'msg' at the given severity through the distutils logger."""
        log.log(level, msg)
    def run_commands(self):
        """Run each command that was seen on the setup script command line.
        Uses the list of commands found and cache of command objects
        created by 'get_command_obj()'.
        """
        # run_command() is idempotent per command (checks 'have_run').
        for cmd in self.commands:
            self.run_command(cmd)
# -- Methods that operate on its Commands --------------------------
    def run_command(self, command):
        """Do whatever it takes to run a command (including nothing at all,
        if the command has already been run).  Specifically: if we have
        already created and run the command named by 'command', return
        silently without doing anything.  If the command named by 'command'
        doesn't even have a command object yet, create one.  Then invoke
        'run()' on that command object (or an existing one).
        """
        # Already been here, done that? then return silently.
        if self.have_run.get(command):
            return
        log.info("running %s", command)
        cmd_obj = self.get_command_obj(command)
        cmd_obj.ensure_finalized()
        cmd_obj.run()
        # Mark it done so a second invocation is a no-op.
        self.have_run[command] = 1
# -- Distribution query methods ------------------------------------
    def has_pure_modules(self):
        """Return True if any pure-Python packages or modules are declared."""
        return len(self.packages or self.py_modules or []) > 0
    def has_ext_modules(self):
        """Return a truthy value if any C/C++ extension modules are declared."""
        return self.ext_modules and len(self.ext_modules) > 0
    def has_c_libraries(self):
        """Return a truthy value if any C libraries are declared."""
        return self.libraries and len(self.libraries) > 0
    def has_modules(self):
        """Return True if the distribution ships any code at all."""
        return self.has_pure_modules() or self.has_ext_modules()
    def has_headers(self):
        """Return a truthy value if any C header files are declared."""
        return self.headers and len(self.headers) > 0
    def has_scripts(self):
        """Return a truthy value if any scripts are declared."""
        return self.scripts and len(self.scripts) > 0
    def has_data_files(self):
        """Return a truthy value if any data files are declared."""
        return self.data_files and len(self.data_files) > 0
    def is_pure(self):
        """Return True if the distribution contains only pure Python code
        (no extensions, no C libraries)."""
        return (
            self.has_pure_modules()
            and not self.has_ext_modules()
            and not self.has_c_libraries()
        )
# -- Metadata query methods ----------------------------------------
# If you're looking for 'get_name()', 'get_version()', and so forth,
# they are defined in a sneaky way: the constructor binds self.get_XXX
# to self.metadata.get_XXX. The actual code is in the
# DistributionMetadata class, below.
class DistributionMetadata:
    """Container for the distribution meta-data: name, version,
    author, and so forth, as read from and written to PKG-INFO files.
    """

    # Basenames for which Distribution generates 'get_XXX' forwarders.
    _METHOD_BASENAMES = (
        "name",
        "version",
        "author",
        "author_email",
        "maintainer",
        "maintainer_email",
        "url",
        "license",
        "description",
        "long_description",
        "keywords",
        "platforms",
        "fullname",
        "contact",
        "contact_email",
        "classifiers",
        "download_url",
        # PEP 314
        "provides",
        "requires",
        "obsoletes",
    )

    def __init__(self, path=None):
        """Initialize every field to None, or populate the fields from
        the PKG-INFO file at 'path' if one is given.
        """
        if path is not None:
            # (fix) close the file deterministically instead of leaking
            # the handle until garbage collection.
            with open(path) as pkg_info_file:
                self.read_pkg_file(pkg_info_file)
        else:
            self.name = None
            self.version = None
            self.author = None
            self.author_email = None
            self.maintainer = None
            self.maintainer_email = None
            self.url = None
            self.license = None
            self.description = None
            self.long_description = None
            self.keywords = None
            self.platforms = None
            self.classifiers = None
            self.download_url = None
            # PEP 314
            self.provides = None
            self.requires = None
            self.obsoletes = None

    def read_pkg_file(self, file):
        """Reads the metadata values from a file object (RFC 822 headers)."""
        msg = message_from_file(file)

        def _read_field(name):
            # Single-valued header; the "UNKNOWN" placeholder counts as absent.
            value = msg[name]
            if value and value != "UNKNOWN":
                return value

        def _read_list(name):
            # Multi-valued header; normalize an empty list to None.
            values = msg.get_all(name, None)
            if values == []:
                return None
            return values

        metadata_version = msg['metadata-version']
        self.name = _read_field('name')
        self.version = _read_field('version')
        self.description = _read_field('summary')
        # we are filling author only.
        self.author = _read_field('author')
        self.maintainer = None
        self.author_email = _read_field('author-email')
        self.maintainer_email = None
        self.url = _read_field('home-page')
        self.license = _read_field('license')
        if 'download-url' in msg:
            self.download_url = _read_field('download-url')
        else:
            self.download_url = None
        self.long_description = _read_field('description')
        # (fix) a redundant second "self.description = _read_field('summary')"
        # was removed here; the field is already assigned above.
        if 'keywords' in msg:
            self.keywords = _read_field('keywords').split(',')
        self.platforms = _read_list('platform')
        self.classifiers = _read_list('classifier')
        # PEP 314 - these fields only exist in 1.1
        # NOTE(review): strict equality ignores later metadata versions
        # (1.2, 2.x); kept as-is to preserve behavior.
        if metadata_version == '1.1':
            self.requires = _read_list('requires')
            self.provides = _read_list('provides')
            self.obsoletes = _read_list('obsoletes')
        else:
            self.requires = None
            self.provides = None
            self.obsoletes = None

    def write_pkg_info(self, base_dir):
        """Write the PKG-INFO file into the release tree."""
        with open(
            os.path.join(base_dir, 'PKG-INFO'), 'w', encoding='UTF-8'
        ) as pkg_info:
            self.write_pkg_file(pkg_info)

    def write_pkg_file(self, file):
        """Write the PKG-INFO format data to a file object."""
        version = '1.0'
        # Any PEP 314 field bumps the metadata version to 1.1.
        if (
            self.provides
            or self.requires
            or self.obsoletes
            or self.classifiers
            or self.download_url
        ):
            version = '1.1'

        # required fields
        file.write('Metadata-Version: %s\n' % version)
        file.write('Name: %s\n' % self.get_name())
        file.write('Version: %s\n' % self.get_version())

        def maybe_write(header, val):
            # Emit the header only when the value is non-empty.
            if val:
                file.write(f"{header}: {val}\n")

        # optional fields
        maybe_write("Summary", self.get_description())
        maybe_write("Home-page", self.get_url())
        maybe_write("Author", self.get_contact())
        maybe_write("Author-email", self.get_contact_email())
        maybe_write("License", self.get_license())
        maybe_write("Download-URL", self.download_url)
        maybe_write("Description", rfc822_escape(self.get_long_description() or ""))
        maybe_write("Keywords", ",".join(self.get_keywords()))

        self._write_list(file, 'Platform', self.get_platforms())
        self._write_list(file, 'Classifier', self.get_classifiers())

        # PEP 314
        self._write_list(file, 'Requires', self.get_requires())
        self._write_list(file, 'Provides', self.get_provides())
        self._write_list(file, 'Obsoletes', self.get_obsoletes())

    def _write_list(self, file, name, values):
        # Emit one 'Name: value' header line per list element.
        values = values or []
        for value in values:
            file.write('{}: {}\n'.format(name, value))

    # -- Metadata query methods ----------------------------------------

    def get_name(self):
        return self.name or "UNKNOWN"

    def get_version(self):
        return self.version or "0.0.0"

    def get_fullname(self):
        return "{}-{}".format(self.get_name(), self.get_version())

    def get_author(self):
        return self.author

    def get_author_email(self):
        return self.author_email

    def get_maintainer(self):
        return self.maintainer

    def get_maintainer_email(self):
        return self.maintainer_email

    def get_contact(self):
        # The maintainer takes precedence over the author.
        return self.maintainer or self.author

    def get_contact_email(self):
        return self.maintainer_email or self.author_email

    def get_url(self):
        return self.url

    def get_license(self):
        return self.license

    # British-spelling alias, kept for backward compatibility.
    get_licence = get_license

    def get_description(self):
        return self.description

    def get_long_description(self):
        return self.long_description

    def get_keywords(self):
        return self.keywords or []

    def set_keywords(self, value):
        self.keywords = _ensure_list(value, 'keywords')

    def get_platforms(self):
        return self.platforms

    def set_platforms(self, value):
        self.platforms = _ensure_list(value, 'platforms')

    def get_classifiers(self):
        return self.classifiers or []

    def set_classifiers(self, value):
        self.classifiers = _ensure_list(value, 'classifiers')

    def get_download_url(self):
        return self.download_url

    # PEP 314
    def get_requires(self):
        return self.requires or []

    def set_requires(self, value):
        # Validate each requirement string before storing.
        import distutils.versionpredicate

        for v in value:
            distutils.versionpredicate.VersionPredicate(v)
        self.requires = list(value)

    def get_provides(self):
        return self.provides or []

    def set_provides(self, value):
        value = [v.strip() for v in value]
        for v in value:
            import distutils.versionpredicate

            distutils.versionpredicate.split_provision(v)
        self.provides = value

    def get_obsoletes(self):
        return self.obsoletes or []

    def set_obsoletes(self, value):
        import distutils.versionpredicate

        for v in value:
            distutils.versionpredicate.VersionPredicate(v)
        self.obsoletes = list(value)
def fix_help_options(options):
    """Convert a 4-tuple 'help_options' list as found in various command
    classes to the 3-tuple form required by FancyGetopt (the trailing
    callable element is dropped).
    """
    return [help_tuple[0:3] for help_tuple in options]
|
PypiClean
|
/pk-dlp-2023.3.4.tar.gz/pk-dlp-2023.3.4/yt_dlp/extractor/tokentube.py
|
import functools
import re
from .common import InfoExtractor
from ..utils import (
clean_html,
get_element_by_class,
parse_count,
remove_end,
unified_strdate,
js_to_json,
OnDemandPagedList,
)
class TokentubeIE(InfoExtractor):
    """Extractor for single Tokentube videos (/v/, /l/ and view?v= URLs)."""
    _VALID_URL = r'https?://(?:www\.)?tokentube\.net/(?:view\?[vl]=|[vl]/)(?P<id>\d+)'
    _TESTS = [{
        'url': 'https://tokentube.net/l/3236632011/Praise-A-Thon-Pastori-Chrisin-ja-Pastori-Bennyn-kanssa-27-8-2021',
        'info_dict': {
            'id': '3236632011',
            'ext': 'mp4',
            'title': 'Praise-A-Thon Pastori Chrisin ja Pastori Bennyn kanssa 27.8.2021',
            'description': '',
            'uploader': 'Pastori Chris - Rapsodia.fi',
            'upload_date': '20210827',
        },
        'params': {
            'skip_download': True,
        },
    }, {
        'url': 'https://tokentube.net/v/3950239124/Linux-Ubuntu-Studio-perus-k%C3%A4ytt%C3%B6',
        'md5': '0e1f00421f501f5eada9890d38fcfb56',
        'info_dict': {
            'id': '3950239124',
            'ext': 'mp4',
            'title': 'Linux Ubuntu Studio perus käyttö',
            'description': 'md5:46077d0daaba1974f2dc381257f9d64c',
            'uploader': 'jyrilehtonen',
            'upload_date': '20210825',
        },
    }, {
        'url': 'https://tokentube.net/view?v=3582463289',
        'info_dict': {
            'id': '3582463289',
            'ext': 'mp4',
            'title': 'Police for Freedom - toiminta aloitetaan Suomessa ❤️??',
            'description': 'md5:37ebf1cb44264e0bf23ed98b337ee63e',
            'uploader': 'Voitontie',
            'upload_date': '20210428',
        }
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        title = self._html_search_regex(r'<h1\s*class=["\']title-text["\']>(.+?)</h1>', webpage, 'title')
        # The player config is embedded as a JS object literal in the page.
        data_json = self._html_search_regex(r'({["\']html5["\'].+?}}}+)', webpage, 'data json')
        data_json = self._parse_json(js_to_json(data_json), video_id, fatal=False)
        # Fall back to the updateSrc(...) call when the config lacks sources.
        sources = data_json.get('sources') or self._parse_json(
            self._html_search_regex(r'updateSrc\(([^\)]+)\)', webpage, 'sources'),
            video_id, transform_source=js_to_json)
        # (fix) loop variable renamed from 'format' -- it shadowed the builtin.
        formats = [{
            'url': fmt.get('src'),
            'format_id': fmt.get('label'),
            'height': fmt.get('res'),
        } for fmt in sources]
        view_count = parse_count(self._html_search_regex(
            r'<p\s*class=["\']views_counter["\']>\s*([\d\.,]+)\s*<span>views?</span></p>',
            webpage, 'view_count', fatal=False))
        like_count = parse_count(self._html_search_regex(
            r'<div\s*class="sh_button\s*likes_count">\s*(\d+)\s*</div>',
            webpage, 'like count', fatal=False))
        dislike_count = parse_count(self._html_search_regex(
            r'<div\s*class="sh_button\s*dislikes_count">\s*(\d+)\s*</div>',
            webpage, 'dislike count', fatal=False))
        upload_date = unified_strdate(self._html_search_regex(
            r'<span\s*class="p-date">Published\s*on\s+([^<]+)',
            webpage, 'upload date', fatal=False))
        uploader = self._html_search_regex(
            r'<a\s*class="place-left"[^>]+>(.+?)</a>',
            webpage, 'uploader', fatal=False)
        description = (clean_html(get_element_by_class('p-d-txt', webpage))
                       or self._html_search_meta(('og:description', 'description', 'twitter:description'), webpage))
        # The description block ends with a trailing "Category" label.
        description = remove_end(description, 'Category')
        return {
            'id': video_id,
            'formats': formats,
            'title': title,
            'view_count': view_count,
            'like_count': like_count,
            'dislike_count': dislike_count,
            'upload_date': upload_date,
            'description': description,
            'uploader': uploader,
        }
class TokentubeChannelIE(InfoExtractor):
    """Extractor for Tokentube channel pages (paged playlist of videos)."""
    _PAGE_SIZE = 20
    IE_NAME = 'Tokentube:channel'
    _VALID_URL = r'https?://(?:www\.)?tokentube\.net/channel/(?P<id>\d+)/[^/]+(?:/videos)?'
    _TESTS = [{
        'url': 'https://tokentube.net/channel/3697658904/TokenTube',
        'info_dict': {
            'id': '3697658904',
        },
        'playlist_mincount': 7,
    }, {
        'url': 'https://tokentube.net/channel/3353234420/Linux/videos',
        'info_dict': {
            'id': '3353234420',
        },
        'playlist_mincount': 20,
    }, {
        'url': 'https://tokentube.net/channel/3475834195/Voitontie',
        'info_dict': {
            'id': '3475834195',
        },
        'playlist_mincount': 150,
    }]

    def _fetch_page(self, channel_id, page):
        """Yield url_result entries for one page of the channel listing."""
        page += 1  # OnDemandPagedList pages are 0-based; the site is 1-based
        videos_info = self._download_webpage(
            f'https://tokentube.net/videos?p=0&m=1&sort=recent&u={channel_id}&page={page}',
            channel_id, headers={'X-Requested-With': 'XMLHttpRequest'},
            note=f'Downloading page {page}', fatal=False)
        # (fix) with fatal=False a failed download returns a falsy value;
        # the substring check below would raise TypeError on it.
        if not videos_info:
            return
        if '</i> Sorry, no results were found.' not in videos_info:
            for path, media_id in re.findall(
                    r'<a[^>]+\bhref=["\']([^"\']+/[lv]/(\d+)/\S+)["\'][^>]+>',
                    videos_info):
                yield self.url_result(path, ie=TokentubeIE.ie_key(), video_id=media_id)

    def _real_extract(self, url):
        channel_id = self._match_id(url)
        entries = OnDemandPagedList(functools.partial(
            self._fetch_page, channel_id), self._PAGE_SIZE)
        return self.playlist_result(entries, channel_id)
|
PypiClean
|
/dp_cgans-0.0.4.tar.gz/dp_cgans-0.0.4/src/dp_cgans/constraints/base.py
|
import copy
import importlib
import inspect
import logging
import warnings
import pandas as pd
from copulas.multivariate.gaussian import GaussianMultivariate
from copulas.univariate import GaussianUnivariate
from rdt import HyperTransformer
from sdv.constraints.errors import MissingConstraintColumnError
LOGGER = logging.getLogger(__name__)
def _get_qualified_name(obj):
"""Return the Fully Qualified Name from an instance or class."""
module = obj.__module__
if hasattr(obj, '__name__'):
obj_name = obj.__name__
else:
obj_name = obj.__class__.__name__
return module + '.' + obj_name
def _module_contains_callable_name(obj):
"""Return if module contains the name of the callable object."""
if hasattr(obj, '__name__'):
obj_name = obj.__name__
else:
obj_name = obj.__class__.__name__
return obj_name in importlib.import_module(obj.__module__).__dict__
def get_subclasses(cls):
    """Recursively find subclasses for the current class object.

    Returns a dict mapping each (transitive) subclass name to the
    subclass itself.
    """
    found = {}
    pending = list(cls.__subclasses__())
    while pending:
        subclass = pending.pop()
        found[subclass.__name__] = subclass
        pending.extend(subclass.__subclasses__())
    return found
def import_object(obj):
    """Import an object from its qualified name.

    Non-string arguments are assumed to already be the object and are
    returned unchanged.
    """
    if not isinstance(obj, str):
        return obj
    package, name = obj.rsplit('.', 1)
    module = importlib.import_module(package)
    return getattr(module, name)
class ConstraintMeta(type):
    """Metaclass for Constraints.
    This metaclass replaces the ``__init__`` method with a new function
    that stores the arguments passed to the __init__ method in a dict
    as the attribute ``__kwargs__``.
    This allows us to later on dump the class definition as a dict.
    """
    def __init__(self, name, bases, attr):
        super().__init__(name, bases, attr)
        old__init__ = self.__init__
        signature = inspect.signature(old__init__)
        # Positional parameter names, minus 'self'.
        arg_names = list(signature.parameters.keys())[1:]
        def __init__(self, *args, **kwargs):
            # Record constructor arguments only on the most-derived class,
            # so a subclass's super().__init__() call does not overwrite
            # the kwargs captured for the subclass itself.
            class_name = self.__class__.__name__
            if name == class_name:
                self.__kwargs__ = copy.deepcopy(kwargs)
                # Fold positional args in under their parameter names.
                self.__kwargs__.update(dict(zip(arg_names, args)))
            old__init__(self, *args, **kwargs)
        # Preserve the original signature/doc for introspection tools.
        __init__.__doc__ = old__init__.__doc__
        __init__.__signature__ = signature
        self.__init__ = __init__
class Constraint(metaclass=ConstraintMeta):
"""Constraint base class.
This class is not intended to be used directly and should rather be
subclassed to create different types of constraints.
If ``handling_strategy`` is passed with the value ``transform``
or ``reject_sampling``, the ``filter_valid`` or ``transform`` and
``reverse_transform`` methods will be replaced respectively by a simple
identity function.
Attributes:
constraint_columns (tuple[str]):
The names of the columns used by this constraint.
rebuild_columns (typle[str]):
The names of the columns that this constraint will rebuild during
``reverse_transform``.
Args:
handling_strategy (str):
How this Constraint should be handled, which can be ``transform``,
``reject_sampling`` or ``all``.
fit_columns_model (bool):
If False, reject sampling will be used to handle conditional sampling.
Otherwise, a model will be trained and used to sample other columns
based on the conditioned column.
"""
constraint_columns = ()
rebuild_columns = ()
_hyper_transformer = None
_columns_model = None
    def _identity(self, table_data):
        # No-op used by ``__init__`` to disable transform/filter steps
        # depending on the chosen handling strategy.
        return table_data
    def __init__(self, handling_strategy, fit_columns_model=False):
        # 'transform': the constraint is handled purely by the
        # (reverse_)transform pair, so validity filtering becomes a no-op.
        # 'reject_sampling': the transforms become no-ops and invalid rows
        # are filtered out instead. 'all' keeps every mechanism active.
        self.fit_columns_model = fit_columns_model
        if handling_strategy == 'transform':
            self.filter_valid = self._identity
        elif handling_strategy == 'reject_sampling':
            self.rebuild_columns = ()
            self.transform = self._identity
            self.reverse_transform = self._identity
        elif handling_strategy != 'all':
            raise ValueError('Unknown handling strategy: {}'.format(handling_strategy))
    def _fit(self, table_data):
        # Hook for subclasses; the base constraint learns nothing.
        del table_data
    def fit(self, table_data):
        """Fit ``Constraint`` class to data.
        If ``fit_columns_model`` is True, then this method will fit
        a ``GaussianCopula`` model to the relevant columns in ``table_data``.
        Subclasses can overwrite this method, or overwrite the ``_fit`` method
        if they will not be needing the model to handle conditional sampling.
        Args:
            table_data (pandas.DataFrame):
                Table data.
        """
        self._fit(table_data)
        # A columns model is only useful when the constraint spans more
        # than one column (it supports conditional sampling later on).
        if self.fit_columns_model and len(self.constraint_columns) > 1:
            data_to_model = table_data[list(self.constraint_columns)]
            # One-hot encode categoricals so the Gaussian copula can model them.
            self._hyper_transformer = HyperTransformer(default_data_type_transformers={
                'categorical': 'OneHotEncodingTransformer',
            })
            transformed_data = self._hyper_transformer.fit_transform(data_to_model)
            self._columns_model = GaussianMultivariate(
                distribution=GaussianUnivariate
            )
            self._columns_model.fit(transformed_data)
    def _transform(self, table_data):
        # Hook for subclasses; the base implementation leaves data unchanged.
        return table_data
    def _reject_sample(self, num_rows, conditions):
        """Sample rows that satisfy this constraint via rejection sampling.
        Draws from the fitted columns model conditioned on ``conditions``,
        keeps only rows passing ``is_valid``, and keeps sampling (sizing
        each batch by the observed acceptance rate) until ``num_rows``
        valid rows exist or 100 attempts have been made.
        """
        sampled = self._columns_model.sample(
            num_rows=num_rows,
            conditions=conditions
        )
        sampled = self._hyper_transformer.reverse_transform(sampled)
        valid_rows = sampled[self.is_valid(sampled)]
        counter = 0
        total_sampled = num_rows
        while len(valid_rows) < num_rows:
            num_valid = len(valid_rows)
            if counter >= 100:
                if len(valid_rows) == 0:
                    error = 'Could not get enough valid rows within 100 trials.'
                    raise ValueError(error)
                else:
                    # Give up sampling: replicate the valid rows we do
                    # have to reach exactly ``num_rows``.
                    multiplier = num_rows // num_valid
                    num_rows_missing = num_rows % num_valid
                    remainder_rows = valid_rows.iloc[0:num_rows_missing, :]
                    valid_rows = pd.concat([valid_rows] * multiplier + [remainder_rows],
                                           ignore_index=True)
                    break
            remaining = num_rows - num_valid
            # Add-one-smoothed acceptance-rate estimate used to size the
            # next batch, capped at 10x the requested row count.
            valid_probability = (num_valid + 1) / (total_sampled + 1)
            max_rows = num_rows * 10
            num_to_sample = min(int(remaining / valid_probability), max_rows)
            total_sampled += num_to_sample
            new_sampled = self._columns_model.sample(
                num_rows=num_to_sample,
                conditions=conditions
            )
            new_sampled = self._hyper_transformer.reverse_transform(new_sampled)
            new_valid_rows = new_sampled[self.is_valid(new_sampled)]
            valid_rows = pd.concat([valid_rows, new_valid_rows], ignore_index=True)
            counter += 1
        return valid_rows.iloc[0:num_rows, :]
    def _sample_constraint_columns(self, table_data):
        """Sample the full set of constraint columns conditioned on the
        subset of them present in ``table_data``.
        Rows are grouped by their condition values so each distinct
        condition is reject-sampled only once.
        """
        condition_columns = [c for c in self.constraint_columns if c in table_data.columns]
        grouped_conditions = table_data[condition_columns].groupby(condition_columns)
        all_sampled_rows = list()
        for group, df in grouped_conditions:
            # groupby yields a scalar key for a single condition column.
            if not isinstance(group, tuple):
                group = [group]
            # Use the first transformed row of the group as the condition.
            transformed_condition = self._hyper_transformer.transform(df).iloc[0].to_dict()
            sampled_rows = self._reject_sample(
                num_rows=df.shape[0],
                conditions=transformed_condition
            )
            all_sampled_rows.append(sampled_rows)
        sampled_data = pd.concat(all_sampled_rows, ignore_index=True)
        return sampled_data
    def _validate_constraint_columns(self, table_data):
        """Validate the columns in ``table_data``.
        If ``fit_columns_model`` is False and any columns in ``constraint_columns``
        are not present in ``table_data``, this method will raise a
        ``MissingConstraintColumnError``. Otherwise it will return the ``table_data``
        unchanged. If ``fit_columns_model`` is True, then this method will sample
        any missing ``constraint_columns`` from its model conditioned on the
        ``constraint_columns`` that ``table_data`` does contain. If ``table_data``
        doesn't contain any of the ``constraint_columns`` then a
        ``MissingConstraintColumnError`` will be raised.
        Args:
            table_data (pandas.DataFrame):
                Table data.
        """
        missing_columns = [col for col in self.constraint_columns if col not in table_data.columns]
        if missing_columns:
            if not self._columns_model:
                # No model: the caller will fall back to reject sampling,
                # which can be slow -- warn about it.
                warning_message = (
                    'When `fit_columns_model` is False and we are conditioning on a subset '
                    'of the constraint columns, conditional sampling uses reject sampling '
                    'which can be slow. Changing `fit_columns_model` to True can improve '
                    'the performance.'
                )
                warnings.warn(warning_message, UserWarning)
            all_columns_missing = len(missing_columns) == len(self.constraint_columns)
            if self._columns_model is None or all_columns_missing:
                raise MissingConstraintColumnError()
            else:
                # Fill in the missing columns by sampling them conditioned
                # on the ones present, then restore the untouched columns.
                sampled_data = self._sample_constraint_columns(table_data)
                other_columns = [c for c in table_data.columns if c not in self.constraint_columns]
                sampled_data[other_columns] = table_data[other_columns]
                return sampled_data
        return table_data
    def transform(self, table_data):
        """Perform necessary transformations needed by constraint.
        Subclasses can optionally overwrite this method. If the transformation
        requires certain columns to be present in ``table_data``, then the subclass
        should overwrite the ``_transform`` method instead. This method raises a
        ``MissingConstraintColumnError`` if the ``table_data`` is missing any columns
        needed to do the transformation. If columns are present, this method will call
        the ``_transform`` method.
        Args:
            table_data (pandas.DataFrame):
                Table data.
        Returns:
            pandas.DataFrame:
                Transformed data (unmodified by the base implementation).
        """
        # May sample in missing constraint columns or raise.
        table_data = self._validate_constraint_columns(table_data)
        return self._transform(table_data)
def fit_transform(self, table_data):
"""Fit this Constraint to the data and then transform it.
Args:
table_data (pandas.DataFrame):
Table data.
Returns:
pandas.DataFrame:
Transformed data.
"""
self.fit(table_data)
return self.transform(table_data)
def reverse_transform(self, table_data):
"""Identity method for completion. To be optionally overwritten by subclasses.
Args:
table_data (pandas.DataFrame):
Table data.
Returns:
pandas.DataFrame:
Input data unmodified.
"""
return table_data
def is_valid(self, table_data):
"""Say whether the given table rows are valid.
This is a dummy version of the method that returns a series of ``True``
values to avoid dropping any rows. This should be overwritten by all
the subclasses that have a way to decide which rows are valid and which
are not.
Args:
table_data (pandas.DataFrame):
Table data.
Returns:
pandas.Series:
Series of ``True`` values
"""
return pd.Series(True, index=table_data.index)
def filter_valid(self, table_data):
"""Get only the rows that are valid.
The filtering is done by calling the method ``is_valid``, which should
be overwritten by subclasses, while this method should stay untouched.
Args:
table_data (pandas.DataFrame):
Table data.
Returns:
pandas.DataFrame:
Input data unmodified.
"""
valid = self.is_valid(table_data)
invalid = sum(~valid)
if invalid:
LOGGER.debug('%s: %s invalid rows out of %s.',
self.__class__.__name__, sum(~valid), len(valid))
if isinstance(valid, pd.Series):
return table_data[valid.values]
return table_data[valid]
@classmethod
def from_dict(cls, constraint_dict):
"""Build a Constraint object from a dict.
Args:
constraint_dict (dict):
Dict containing the keyword ``constraint`` alongside
any additional arguments needed to create the instance.
Returns:
Constraint:
New constraint instance.
"""
constraint_dict = constraint_dict.copy()
constraint_class = constraint_dict.pop('constraint')
subclasses = get_subclasses(cls)
if isinstance(constraint_class, str):
if '.' in constraint_class:
constraint_class = import_object(constraint_class)
else:
constraint_class = subclasses[constraint_class]
return constraint_class(**constraint_dict)
def to_dict(self):
"""Return a dict representation of this Constraint.
The dictionary will contain the Qualified Name of the constraint
class in the key ``constraint``, as well as any other arguments
that were passed to the constructor when the instance was created.
Returns:
dict:
Dict representation of this Constraint.
"""
constraint_dict = {
'constraint': _get_qualified_name(self.__class__),
}
for key, obj in copy.deepcopy(self.__kwargs__).items():
if callable(obj) and _module_contains_callable_name(obj):
constraint_dict[key] = _get_qualified_name(obj)
else:
constraint_dict[key] = obj
return constraint_dict
|
PypiClean
|
/msgraph-sdk-1.0.0a3.tar.gz/msgraph-sdk-1.0.0a3/msgraph/generated/users/item/authentication/microsoft_authenticator_methods/item/device/registered_users/user/user_request_builder.py
|
from __future__ import annotations
from dataclasses import dataclass
from kiota_abstractions.get_path_parameters import get_path_parameters
from kiota_abstractions.method import Method
from kiota_abstractions.request_adapter import RequestAdapter
from kiota_abstractions.request_information import RequestInformation
from kiota_abstractions.request_option import RequestOption
from kiota_abstractions.response_handler import ResponseHandler
from kiota_abstractions.serialization import Parsable, ParsableFactory
from typing import Any, Callable, Dict, List, Optional, Union
from .........models import user_collection_response
from .........models.o_data_errors import o_data_error
from .count import count_request_builder
class UserRequestBuilder():
    """
    Casts the previous resource to user.
    """
    def count(self) -> count_request_builder.CountRequestBuilder:
        """
        Provides operations to count the resources in the collection.
        """
        return count_request_builder.CountRequestBuilder(self.request_adapter, self.path_parameters)

    def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None:
        """
        Instantiates a new UserRequestBuilder and sets the default values.
        Args:
            pathParameters: The raw url or the Url template parameters for the request.
            requestAdapter: The request adapter to use to execute the requests.
        """
        # Both collaborators are mandatory; fail fast when either is missing.
        if path_parameters is None:
            raise Exception("path_parameters cannot be undefined")
        if request_adapter is None:
            raise Exception("request_adapter cannot be undefined")
        # Url template to use to build the URL for the current request builder
        self.url_template: str = "{+baseurl}/users/{user%2Did}/authentication/microsoftAuthenticatorMethods/{microsoftAuthenticatorAuthenticationMethod%2Did}/device/registeredUsers/microsoft.graph.user{?%24top,%24skip,%24search,%24filter,%24count,%24orderby,%24select,%24expand}"
        self.path_parameters = get_path_parameters(path_parameters)
        self.request_adapter = request_adapter

    def create_get_request_information(self,request_configuration: Optional[UserRequestBuilderGetRequestConfiguration] = None) -> RequestInformation:
        """
        Get the items of type microsoft.graph.user in the microsoft.graph.directoryObject collection
        Args:
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: RequestInformation
        """
        request = RequestInformation()
        request.url_template = self.url_template
        request.path_parameters = self.path_parameters
        request.http_method = Method.GET
        request.headers["Accept"] = "application/json"
        if request_configuration:
            # Fold in caller-supplied headers, query parameters and options.
            request.add_request_headers(request_configuration.headers)
            request.set_query_string_parameters_from_raw_object(request_configuration.query_parameters)
            request.add_request_options(request_configuration.options)
        return request

    async def get(self,request_configuration: Optional[UserRequestBuilderGetRequestConfiguration] = None, response_handler: Optional[ResponseHandler] = None) -> Optional[user_collection_response.UserCollectionResponse]:
        """
        Get the items of type microsoft.graph.user in the microsoft.graph.directoryObject collection
        Args:
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
            responseHandler: Response handler to use in place of the default response handling provided by the core service
        Returns: Optional[user_collection_response.UserCollectionResponse]
        """
        request_info = self.create_get_request_information(request_configuration)
        # Every 4XX/5XX status maps onto the OData error type.
        error_mapping: Dict[str, ParsableFactory] = {
            "4XX": o_data_error.ODataError,
            "5XX": o_data_error.ODataError,
        }
        if not self.request_adapter:
            raise Exception("Http core is null")
        return await self.request_adapter.send_async(request_info, user_collection_response.UserCollectionResponse, response_handler, error_mapping)

    @dataclass
    class UserRequestBuilderGetQueryParameters():
        """
        Get the items of type microsoft.graph.user in the microsoft.graph.directoryObject collection
        """
        # Include count of items
        count: Optional[bool] = None
        # Expand related entities
        expand: Optional[List[str]] = None
        # Filter items by property values
        filter: Optional[str] = None
        # Order items by property values
        orderby: Optional[List[str]] = None
        # Search items by search phrases
        search: Optional[str] = None
        # Select properties to be returned
        select: Optional[List[str]] = None
        # Skip the first n items
        skip: Optional[int] = None
        # Show only the first n items
        top: Optional[int] = None

        def get_query_parameter(self,original_name: Optional[str] = None) -> str:
            """
            Maps the query parameters names to their encoded names for the URI template parsing.
            Args:
                originalName: The original query parameter name in the class.
            Returns: str
            """
            if original_name is None:
                raise Exception("original_name cannot be undefined")
            # OData system query options carry a percent-encoded '$' prefix;
            # unknown names pass through unchanged.
            encoded = {
                "count": "%24count",
                "expand": "%24expand",
                "filter": "%24filter",
                "orderby": "%24orderby",
                "search": "%24search",
                "select": "%24select",
                "skip": "%24skip",
                "top": "%24top",
            }
            return encoded.get(original_name, original_name)

    @dataclass
    class UserRequestBuilderGetRequestConfiguration():
        """
        Configuration for the request such as headers, query parameters, and middleware options.
        """
        # Request headers
        headers: Optional[Dict[str, str]] = None
        # Request options
        options: Optional[List[RequestOption]] = None
        # Request query parameters
        query_parameters: Optional[UserRequestBuilder.UserRequestBuilderGetQueryParameters] = None
|
PypiClean
|
/taskcc-alipay-sdk-python-3.3.398.tar.gz/taskcc-alipay-sdk-python-3.3.398/alipay/aop/api/request/KoubeiCateringDishRuleCreateRequest.py
|
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.KoubeiCateringDishRuleCreateModel import KoubeiCateringDishRuleCreateModel
class KoubeiCateringDishRuleCreateRequest(object):
    """Request object for the Alipay ``koubei.catering.dish.rule.create`` API.

    Holds the business payload plus the common gateway parameters (version,
    terminal info, callback URLs, user-defined params) and flattens them
    into the dict that is sent to the gateway.
    """

    def __init__(self, biz_model=None):
        self._biz_model = biz_model
        self._biz_content = None
        self._version = "1.0"
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None
        self._return_url = None
        self._udf_params = None
        self._need_encrypt = False

    @property
    def biz_model(self):
        return self._biz_model

    @biz_model.setter
    def biz_model(self, value):
        self._biz_model = value

    @property
    def biz_content(self):
        return self._biz_content

    @biz_content.setter
    def biz_content(self, value):
        # Accept either a ready model instance or a raw dict to convert.
        if isinstance(value, KoubeiCateringDishRuleCreateModel):
            self._biz_content = value
        else:
            self._biz_content = KoubeiCateringDishRuleCreateModel.from_alipay_dict(value)

    @property
    def version(self):
        return self._version

    @version.setter
    def version(self, value):
        self._version = value

    @property
    def terminal_type(self):
        return self._terminal_type

    @terminal_type.setter
    def terminal_type(self, value):
        self._terminal_type = value

    @property
    def terminal_info(self):
        return self._terminal_info

    @terminal_info.setter
    def terminal_info(self, value):
        self._terminal_info = value

    @property
    def prod_code(self):
        return self._prod_code

    @prod_code.setter
    def prod_code(self, value):
        self._prod_code = value

    @property
    def notify_url(self):
        return self._notify_url

    @notify_url.setter
    def notify_url(self, value):
        self._notify_url = value

    @property
    def return_url(self):
        return self._return_url

    @return_url.setter
    def return_url(self, value):
        self._return_url = value

    @property
    def udf_params(self):
        return self._udf_params

    @udf_params.setter
    def udf_params(self, value):
        # Anything that is not a dict is silently ignored.
        if not isinstance(value, dict):
            return
        self._udf_params = value

    @property
    def need_encrypt(self):
        return self._need_encrypt

    @need_encrypt.setter
    def need_encrypt(self, value):
        self._need_encrypt = value

    def add_other_text_param(self, key, value):
        """Attach one extra text parameter under ``key``."""
        if not self.udf_params:
            self.udf_params = dict()
        self.udf_params[key] = value

    def _dump_model(self, model):
        """Serialize a model object to the gateway's compact JSON format."""
        return json.dumps(obj=model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))

    def get_params(self):
        """Build the flat parameter dict that is sent to the gateway."""
        params = dict()
        params[P_METHOD] = 'koubei.catering.dish.rule.create'
        params[P_VERSION] = self.version
        if self.biz_model:
            params[P_BIZ_CONTENT] = self._dump_model(self.biz_model)
        if self.biz_content:
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = self._dump_model(self.biz_content)
            else:
                params['biz_content'] = self.biz_content
        # Optional plain-text parameters are only emitted when set.
        for name in ('terminal_type', 'terminal_info', 'prod_code', 'notify_url', 'return_url'):
            value = getattr(self, name)
            if value:
                params[name] = value
        if self.udf_params:
            params.update(self.udf_params)
        return params

    def get_multipart_params(self):
        """No file parameters are used by this request."""
        return dict()
|
PypiClean
|
/cfcapi-1.0.1.tar.gz/cfcapi-1.0.1/README.md
|
[//]: # (1 Project's title)
# cfcapi
[//]: # (2 Project description)
`Python` API to interact with the Caonabo Fluidic Controller (CFC).
The CFC is a board that allows one to control a total of 50 valves to manipulate the flow of chemicals.
It is composed of five valve modules and one control module. Each valve module is composed of the following peripherals:
- Darlington open collector transistor outputs with flyback protection diodes for controlling valves (10)
- Input / output TTL (5.0V) digital ports (5)
- Module indicator LED (1)
From the 10 valve outputs of each valve module, 6 of them (from index 0 to 5) can drive valves of up to 0.5A,
the remaining 4 (6 to 9) can handle up to 1A. All of them can handle up to 50V.
The control module is built around an Arduino Nano 33 BLE module and is composed of the following peripherals:
- Input / output CMOS (3.3V) digital ports (10)
- Analog (0V to 3.3V) inputs (7)
- Module indicator RGB LED (1)
This API allows the manipulation of all these peripherals in `Python`. For more information regarding the CFC hardware please visit its [repository](https://github.imec.be/dna-storage/cfc).
[//]: # (3 Table of contents)
## Table of contents <a name="table-contents"></a>
1. [Installation and package dependencies](#installation)
2. [How to use the package](#use)
+ [2.1 Instantiation, initiation and closing communication to the board.](#instantiation)
+ [2.2 Working with digital Input/Output (IO) ports and analog ports.](#io)
+ [2.3 Working with valves.](#valves)
3. [API reference guide](#reference)
4. [Contributors](#contributors)
5. [License](#license)
[//]: # (4 Package dependencies)
## 1 Installation and package dependencies <a name="installation"></a>
This packager requires the previous installation of the following packages:
- [pyserial 3.5 (or newer)](https://pypi.org/project/pyserial/)
After installing the dependencies, the package can be installed from the Python package index (`PyPI`) repository.
In Windows:
```PowerShell
C:\> pip install --user cfcapi
```
or in Linux:
```bash
$ pip install --user cfcapi
```
As an alternative, the cfcapi package (inside the `src/` folder) can be downloaded and copied into the main folder of the project where it will be used.
[//]: # (5 How to use the package)
## 2 How to use the package <a name="use"></a>
### 2.1 Instantiation, initiation and closing communication to the board. <a name="instantiation"></a>
First, the module must be imported:
```python
>>> from cfcapi import board
```
Once imported, the cfc class inside the module must be instantiated to gain control of a specific CFC board.
Hence, the port where the CFC board is connected, as well as the ID of the board, must be specified.
For the port, the name of the port can be given such as `"COM1"` or `"AUTO"` can be used.
Sometimes, `"AUTO"` might not work due to conflict with some USB devices.
If this happens, the port will have to be passed manually. An example instantiation can be as follows:
```python
>>> cfc = board.cfc(port="AUTO", board_id="000")
```
Once initiated, the following output will appear in the console:
> Caonabo Fluidic Controller (CFC) with correct ID initiated in port: COM26.
> CFC Modules addresses: \['2', '3', '4', '5', '6'\]
The last line identifies the addresses of the 5 different valve modules (from 0 to 4),
and should agree with the physical address configured for each module with the DIP switches.
If this does not match, communication with that module will fail.
To test that the communication to each module works, the `testModules()` method can be used.
For example:
```python
>>> cfc.testModules()
```
The *Module indicator LEDs* of each module will turn on for 0.5s sequentially from module 0 to module 4.
If the communication to any of the module fails, the respective LED will not be turned on.
The first thing to check while debugging a possible problem is the that the physical address (set with the DIP switch)
matches with that of the firmware (the one shown when instantiating or when using the method `getAddress()`).
At the end, the instance must be properly closed to avoid leaving the serial ports open. This is done by using the `close()` method:
```python
>>> cfc.close()
```
### 2.2 Working with digital input/output (IO) ports and analog ports. <a name="io"></a>
For working the IO ports, they first have to be configured.
By default, the digital ports of the modules (pin 10 to 14 of pin header) are configured as *inputs*.
The index for addressing the IOs of the modules is from 0 to 4 linked to physical pins 10 to 14.
The IOs of the Arduino module (D2 to D10) are also initiated as *inputs*.
The indexes to address the IOs from the Arduino module runs from 2 to 10.
**To configure** the IO ports, the method `confIO(mode_list, module)` must be used.
The first parameter required is a list of integers that represents whether the pin will act as an input (0) or an output (1).
The amount of elements in the list should match the amount of IOs in the port to be configured (*i.e.*, 5 for the valve modules and 9 for the Arduino module).
The second parameter is the module: for valve modules a value between `0` to `4` (integer) can be used,
and for the Arduino module the `"A"` character must be used.
By default (*i.e.*, if no module value is passed) the Arduino module is selected.
To configure the first three (0, 1 and 2) IO ports of module 0 as outputs, and the last two (3 and 4) as inputs the following example can be used:
```python
>>> cfc.confIO(mode_list=[1, 1, 1, 0, 0], module=0)
```
To configure all the IOs of the Arduino module (D2 to D10) as outputs, the following example can be used:
```python
>>> cfc.confIO(mode_list=[1, 1, 1, 1, 1, 1, 1, 1, 1], module="A")
```
Once configured, the IO port can be **written** by using the method `writeIO(ioNumber, value, module)`,
or **read** using the method `readIO(ioNumber, module)`.
The parameter `ioNumber` is the number identifying the IO (from 0 to 4 for valve modules and from 2 to 10 for Arduino module).
The value parameter (only for writing) specify whether the IO port will be 1 (5V for valve modules and 3.3V for Arduino module) or 0 (GND for all modules).
Finally, the module parameter (0 to 4 or "A") will identify the module to which the IO belongs.
For example, to write 1 to IO 0 of module 0 you can use:
```python
>>> cfc.writeIO(ioNumber=0, value=1, module=0)
```
and to read from IO D9 of the Arduino module can use:
```python
>>> cfc.readIO(ioNumber=9, module="A")` or `>>> cfc.readIO(ioNumber=9)
```
The **analog** ports in the Arduino module (A0 to A6) can be **read** using the method `analogRead(aNumber)`.
The `aNumber` parameter is simply the number of the analog port to be read (from `0` to `6`).
The method returns a value between 0 and 1023, proportional to the voltage in the respective port (from 0 to 3.3v).
### 2.3 Working with valves. <a name="valves"></a>
To **set a valve** output the method `setValve(valve)` must be used, and to **to clear a valve** the method `clearValve(valve)` must be used.
The `valve` parameter is value from `0` to `49` identifiying the valve.
The first digit in the number identifies the module and second digit identifies the valve.
For example, valve `45` is valve with index `5` on module with index `4`.
Similarly, valve `3` (same as `03`) identifies the valve with index `3` in module with index `0`.
For example, setting and clearing valve 3 of module 2 can be done with:
```python
>>> cfc.setValve(23)
```
and
```python
>>> cfc.clearValve(23)
```
Additionally, the valves can be **activated with a pulse** with the method `pulse(valve, tON, tOFF, periods)`.
The `valve` parameter is the same as for the previous methods, while `tON` and `tOFF` are used to specify
the amount of time (in seconds) that the valve will remain on and off, respectively. By default, parameter `tOFF` is set to `0`.
The last parameter `periods` is used to define how many times the cycle `tON` and `tOFF` must be repeated.
By default, `periods` is set to 1.
If it is desired to set on valve 23 for one second the following code can be used:
```python
>>> cfc.pulse(valve=23, tON=1, tOFF=0, periods=1)
```
or
```python
>>> cfc.pulse(valve=23, tON=1)
```
[//]: # (6 API Reference Guide)
## 3 API Reference Guide <a name="reference"></a>
| | Method | Description | Parameters | Returns | Example |
| -- |--------|-------------|------------|---------|---------|
| 00 | getID | Returns the ID of the CFC board. | NA | ID (string) | `cfc.getID()` |
| 01 |getAddress | Returns the address of the modules configured in the firmware. | NA | addresses (list of strings) | `cfc.getAddress()` |
| 02 |rgbLED | Set status of the RGB LED in the Arduino nano 33 BLE board ON (1) or OFF (0). | r, g, b (int) | NA | `cfc.rgbLED(0,1,0)` |
| 03 |confIO | Configures the IO ports available in either the arduino or the valve modules. | mode_list (str), module="A" (str or int) | NA | `cfc.confIO("11100",0)` |
| 04 |writeIO | Write to the IO ports of the Arduino or the valve modules. | ioNumber (int), value (int - 0 or 1), module="A" (int of str) | NA | `cfc.writeIO(3,1,"A")` |
| 05 |readIO | Read the digital ports of the Arduino, or the valve modules. | ioNumber (int), module="A" (int of str) | value (int - 0 or 1) | `cfc.readIO(3,"A")` |
| 06 |analogRead | Read the analog ports of the Arduino. *Z<sub>IN</sub>* is 10<sup>8</sup> *ohm*. | aNumber (int) | value (int - 0 to 1023) | `cfc.analogRead(2)` |
| 07 |moduleLED | Manipulate the LEDs in the different valve modules of the CFC. |module, value (int) | NA | `cfc.moduleLED(0,1)` |
| 08 |setValve | Set a valve on the CFC. The value can go from 0 to 49. | valve (int) | NA | `cfc.setValve(15)` |
| 09 |clearValve | Clear a valve on the CFC. The value can go from 0 to 49. | valve (int) | NA | `cfc.clearValve(15)` |
| 10 |pulse | Create a train of pulses activating one of the valves. | valve, tON, tOFF=0, periods=1 (int) | NA | `cfc.pulse(15,1,0,1)` |
| 11 |testModules | Activate the LED on the modules sequentially. The LED remains ON for t seconds. | t=0.5 (float) | NA | `cfc.testModules()` |
| 12 | discoverPort | Discover the USB port to which the CFC is connected. | NA | port (str) | `cfc.discoverPort()` |
| 13 | com | Start serial communication with the CFC. Runs once during instantiation. | port="NA" (str), bit_rate="NA" (str or int), board_id="NA" (str), timeout="NA" (Str or float) | NA | `cfc.com("COM4",115200, "000", 1)` |
| 14 | write | Write string of characters to the CFC through the serial bus. | string (str) | response (str) | `cfc.write("ID?")` |
| 15 | read | Read string of characters from the CFC through the serial bus. | NA | response (str) | `cfc.read()` |
| 16 | acknowledgeWrite | Check for the acknowledgement response from the CFC after writing a command that does not require a return from the CFC.| NA | NA | `cfc.acknowledgeWrite()` |
| 17 |close | Method to close the communication to the CFC. | NA | NA | `cfc.close()` |
| 18 |open | Method to open communication to the CFC. | NA | NA | `cfc.open()` |
[//]: # (7 Contributors)
## 4 Contributors <a name="contributors"></a>
- [César Javier Lockhart de la Rosa ([email protected])](https://github.imec.be/lockhart)
- [Kherim Willems ([email protected])](https://github.imec.be/willemsk)
- [Naghmeh Fatemi ([email protected])](https://github.imec.be/fatemi94)
[//]: # (8-License)
## 5 License <a name="license"></a>
Copyright (c) 2022 [César J. Lockhart de la Rosa ([email protected])](https://github.imec.be/lockhart)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
|
PypiClean
|
/ensmallen_graph-0.6.0-cp37-cp37m-manylinux2010_x86_64.whl/ensmallen_graph/datasets/string/metarhiziumrobertsii.py
|
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def MetarhiziumRobertsii(
    directed: bool = False,
    verbose: int = 2,
    cache_path: str = "graphs/string",
    **additional_graph_kwargs: Dict
) -> EnsmallenGraph:
    """Return new instance of the Metarhizium robertsii graph.

    The graph is automatically retrieved from the STRING repository.

    Parameters
    -------------------
    directed: bool = False,
        Whether to load the graph as directed or undirected.
        By default false.
    verbose: int = 2,
        Whether to show loading bars during the retrieval and building
        of the graph.
    cache_path: str = "graphs",
        Where to store the downloaded graphs.
    additional_graph_kwargs: Dict,
        Additional graph kwargs.

    Returns
    -----------------------
    Instance of Metarhizium robertsii graph.

    Report
    ---------------------
    At the time of rendering these methods (please see datetime below), the graph
    had the following characteristics:

    Datetime: 2021-02-02 21:13:09.798988

    The undirected graph Metarhizium robertsii has 7713 nodes and 577767 weighted
    edges, of which none are self-loops. The graph is dense as it has a density
    of 0.01943 and has 18 connected components, where the component with most
    nodes has 7679 nodes and the component with the least nodes has 2 nodes.
    The graph median node degree is 82, the mean node degree is 149.82, and
    the node degree mode is 1. The top 5 most central nodes are 568076.XP_007819684.1
    (degree 1989), 568076.XP_007817038.1 (degree 1554), 568076.XP_007823864.1
    (degree 1470), 568076.XP_007818861.1 (degree 1371) and 568076.XP_007822974.1
    (degree 1352).

    References
    ---------------------
    Please cite the following if you use the data:

    @article{szklarczyk2019string,
        title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
        author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
        journal={Nucleic acids research},
        volume={47},
        number={D1},
        pages={D607--D613},
        year={2019},
        publisher={Oxford University Press}
    }

    Usage example
    ----------------------
    The usage of this graph is relatively straightforward:

    .. code:: python

        # First import the function to retrieve the graph from the datasets
        from ensmallen_graph.datasets.string import MetarhiziumRobertsii

        # Then load the graph
        graph = MetarhiziumRobertsii()

        # Finally, you can do anything with it, for instance, compute its report:
        print(graph)
    """
    # Build the retriever first, then invoke it to download (or load from
    # cache) and construct the graph.
    retriever = AutomaticallyRetrievedGraph(
        graph_name="MetarhiziumRobertsii",
        dataset="string",
        directed=directed,
        verbose=verbose,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs
    )
    return retriever()
|
PypiClean
|
/BoltzTraP2-22.12.1.tar.gz/BoltzTraP2-22.12.1/external/spglib-1.9.9/database/make_VSpU.py
|
import sys
import numpy as np
from sage.all import *
def get_VSpU( M ):
    """Return V * Sp * U derived from the Smith normal form of (M - E).

    ``M`` is a vertical stack of one or more 3x3 integer generator
    matrices; the identity is subtracted block-wise (column index i % 3)
    and the diagonal of the Smith normal form is pseudo-inverted over QQ.
    """
    N = copy(M)
    for row in range(N.nrows()):
        # Subtract the identity from each stacked 3x3 generator block.
        N[row, row % 3] -= 1
    S, U, V = N.smith_form()
    Sp = Matrix(QQ, S.transpose())
    for i in range(3):
        # Pseudo-inverse: invert only the non-zero diagonal entries.
        if not Sp[i, i] == 0:
            Sp[i, i] = 1.0 / Sp[i, i]
    return V * Sp * U
def get_VSpU_sets(g1s, g2s):
    """Build VSpU matrices for every distinct generator combination.

    For each pair (g1, g2) drawn from ``g1s`` x ``g2s`` (an entry of
    ``g2s`` may be ``False``, meaning "single generator"), optionally
    extended with the inversion operation, the generators are stacked into
    one matrix ``M`` and ``get_VSpU(M)`` is computed.

    Returns:
        (VSpU_sets, generator_sets): parallel lists of the VSpU matrices
        and the stacked generator matrices they were derived from.
    """
    VSpU_sets = []
    generator_sets = []
    for g3 in ( False, True ): # for inversion
        for g1 in g1s:
            for g2 in g2s:
                if g2 is not False:
                    # Skip degenerate pairs: identical generators, or g2
                    # equal to g1 combined with the inversion.
                    if np.equal(g1, g2).all():
                        continue
                    if np.equal(g1, np.dot(inv, g2)).all():
                        continue
                    genes = [g1, g2]
                else:
                    genes = [g1,]
                if g3:
                    # Inversion pass: replace improper generators
                    # (det < 0) by inv * g, then add the inversion itself
                    # as an extra generator.
                    genes_new = []
                    for g in genes:
                        if np.linalg.det(g) < 0:
                            genes_new.append(np.dot(inv, g))
                        else:
                            genes_new.append(g)
                    genes_new.append(inv)
                    # Avoid duplicates: drop combinations whose
                    # "properized" generator already occurs in the inputs.
                    if np.linalg.det(g1) < 0:
                        is_found = False
                        for g1_comp in g1s:
                            if np.equal(genes_new[0], g1_comp).all():
                                is_found = True
                                break
                        if is_found:
                            continue
                    if g2 is not False:
                        if np.linalg.det(g2) < 0:
                            is_found = False
                            for g2_comp in g2s:
                                if np.equal(genes_new[1], g2_comp).all():
                                    is_found = True
                                    break
                            if is_found:
                                continue
                else:
                    genes_new = genes
                # Stack all generators vertically into a (3k x 3) matrix.
                M = Matrix(3, 3, genes_new[0])
                if len(genes_new) > 1:
                    for g in genes_new[1:]:
                        M = M.stack(Matrix(3, 3, g))
                VSpU_sets.append(get_VSpU(M))
                generator_sets.append( M )
    return VSpU_sets, generator_sets
def get_rotation_primitive( g1s, g2s, T ):
    """Transform generators into the primitive basis defined by ``T``.

    Each rotation ``g`` is mapped to ``T * g * T^-1``; generators whose
    transformed matrix is not integral are reported and dropped.

    Returns:
        (g1s_new, g2s_new, g1s_old, g2s_old): the transformed generators
        and the originals they correspond to. ``False`` placeholders in
        ``g2s`` (single-generator case) are passed through unchanged.
    """
    if T is not None:
        g1s_new = []
        g2s_new = []
        g1s_old = []
        g2s_old = []
        for g in g1s:
            # print g, "-->"
            try:
                # Change of basis; Matrix(ZZ, ...) raises TypeError when
                # the transformed matrix is not integral.
                M = Matrix(ZZ,T*Matrix(QQ, g)*T.inverse())
                # print M
                g1s_new.append(np.array(M))
                g1s_old.append(g)
            except TypeError:
                print "Not integer matrix, pass this matrix"
        for g in g2s:
            if g:
                # print g, "-->"
                try:
                    M = Matrix(ZZ, T*Matrix(QQ, g)*T.inverse())
                    # print M
                    g2s_new.append(np.array(M))
                    g2s_old.append(g)
                except TypeError:
                    print "Not integer matrix, pass this matrix"
            else:
                # ``False`` placeholder is carried through unchanged.
                g2s_new.append(False)
                g2s_old.append(False)
    # NOTE(review): when T is None the four lists above are never created,
    # so this return raises NameError — callers appear to always pass a
    # non-None T; confirm before relying on the None branch.
    return g1s_new, g2s_new, g1s_old, g2s_old
def write_generators(generator_sets):
    """Print ``generator_sets`` as a C array initializer (Python 2 prints).

    Each stacked generator matrix is emitted as rows of 9 integers; rows
    beyond the number of stacked generators are zero-padded.
    """
    print "{"
    for count, M in enumerate( generator_sets ):
        print "  { /* %d */" % (count+1)
        for i in range( 3 ):
            print "    { ",
            for j in range( 3 ):
                for k in range( 3 ):
                    # Emit real entries while generator i exists; pad with
                    # zeros otherwise.
                    if M.nrows() // 3 > i:
                        print "%d," % M[i*3+j,k],
                    else:
                        print " 0,",
            print "},"
        print "  },"
    print "};"
def write_VSpU(VSpU_sets):
    """Print ``VSpU_sets`` as a C array initializer (Python 2 prints).

    Rational entries are written as ``num.0/den`` C expressions; rows are
    zero-padded to 9 columns.
    """
    print "{"
    for count, VSpU in enumerate( VSpU_sets ):
        print "  { /* %d */" % (count+1)
        for colvals in VSpU:
            print "    {",
            for i in range( 9 ):
                if i < len( colvals ):
                    num = colvals[i].numerator()
                    den = colvals[i].denominator()
                    # Integers print plainly; fractions as C float division.
                    if den == 1:
                        print "%d," % ( num ),
                    else:
                        print "%d.0/%d," % ( num, den ),
                else:
                    print "  0,",
            print "},"
        print "  },"
    print "};"
# 3x3 integer generator matrices in lattice coordinates. The "i"-suffixed
# variants are the corresponding rotoinversions (rotation composed with inv).
identity = [[ 1, 0, 0],
            [ 0, 1, 0],
            [ 0, 0, 1]]
inv = [[-1, 0, 0],
       [ 0,-1, 0],
       [ 0, 0,-1]]
# Tetragonal
rot4z = [[ 0,-1, 0],
         [ 1, 0, 0],
         [ 0, 0, 1]]
rot4zi = [[ 0, 1, 0],
          [-1, 0, 0],
          [ 0, 0,-1]]
rot2z = [[-1, 0, 0],
         [ 0,-1, 0],
         [ 0, 0, 1]]
rot2zi = [[ 1, 0, 0],
          [ 0, 1, 0],
          [ 0, 0,-1]]
rot2x = [[ 1, 0, 0],
         [ 0,-1, 0],
         [ 0, 0,-1]]
rot2xi = [[-1, 0, 0],
          [ 0, 1, 0],
          [ 0, 0, 1]]
# Cubic: 3-fold rotation about the [111] body diagonal.
rot3xyz = [[ 0, 0, 1],
           [ 1, 0, 0],
           [ 0, 1, 0]]
rot3xyzi = [[ 0, 0,-1],
            [-1, 0, 0],
            [ 0,-1, 0]]
# Hexagonal / trigonal two-fold axes in the a-b plane.
hexa2_ab = [[ 0,-1, 0],
            [-1, 0, 0],
            [ 0, 0,-1]]
hexa2_abi = [[ 0, 1, 0],
             [ 1, 0, 0],
             [ 0, 0, 1]]
rot6z = [[ 1,-1, 0],
         [ 1, 0, 0],
         [ 0, 0, 1]]
rot6zi = [[-1, 1, 0],
          [-1, 0, 0],
          [ 0, 0,-1]]
trigo2ab = [[ 0, 1, 0],
            [ 1, 0, 0],
            [ 0, 0,-1]]
trigo2abi = [[ 0,-1, 0],
             [-1, 0, 0],
             [ 0, 0, 1]]
rot3z = [[ 0,-1, 0],
         [ 1,-1, 0],
         [ 0, 0, 1]]
rot3zi = [[ 0, 1, 0],
          [-1, 1, 0],
          [ 0, 0,-1]]
rot2y = [[-1, 0, 0],
         [ 0, 1, 0],
         [ 0, 0,-1]]
rot2yi = [[ 1, 0, 0],
          [ 0,-1, 0],
          [ 0, 0, 1]]
# Centring transformations (rational matrices over QQ); used as the T
# argument of get_rotation_primitive to map generators to the primitive cell.
BCC = Matrix(QQ, [[ 0, 1, 1],
                  [ 1, 0, 1],
                  [ 1, 1, 0]])
FCC = Matrix(QQ, [[-1, 1, 1],
                  [ 1,-1, 1],
                  [ 1, 1,-1]])
CC = Matrix(QQ, [[ 1,-1, 0],
                 [ 1, 1, 0],
                 [ 0, 0, 1]])
BC = Matrix(QQ, [[ 1, 0, 1],
                 [ 0, 1, 0],
                 [-1, 0, 1]])
AC = Matrix(QQ, [[ 1, 0, 0],
                 [ 0, 1, 1],
                 [ 0,-1, 1]])
RC = Matrix(QQ, [[ 1, 0, 1],
                 [-1, 1, 1],
                 [ 0,-1, 1]])
# Parse options
from optparse import OptionParser
parser = OptionParser()
# One boolean flag per crystal system (tricli..cubic) and per lattice
# centring (bcc/fcc/ac/bc/cc/rc); all default to False so that exactly
# the flags given on the command line are set.
parser.set_defaults(is_tricli=False,
                    is_monocli=False,
                    is_ortho=False,
                    is_tetra=False,
                    is_rhombo=False,
                    is_trigo=False,
                    is_hexa=False,
                    is_cubic=False,
                    is_bcc=False,
                    is_fcc=False,
                    is_ac=False,
                    is_bc=False,
                    is_cc=False,
                    is_rc=False)
parser.add_option("--tricli", dest="is_tricli",
                  action="store_true")
parser.add_option("--monocli", dest="is_monocli",
                  action="store_true")
parser.add_option("--ortho", dest="is_ortho",
                  action="store_true")
parser.add_option("--tetra", dest="is_tetra",
                  action="store_true")
parser.add_option("--rhombo", dest="is_rhombo",
                  action="store_true")
parser.add_option("--trigo", dest="is_trigo",
                  action="store_true")
parser.add_option("--hexa", dest="is_hexa",
                  action="store_true")
parser.add_option("--cubic", dest="is_cubic",
                  action="store_true")
parser.add_option("--bcc", dest="is_bcc",
                  action="store_true")
parser.add_option("--fcc", dest="is_fcc",
                  action="store_true")
parser.add_option("--ac", dest="is_ac",
                  action="store_true")
parser.add_option("--bc", dest="is_bc",
                  action="store_true")
parser.add_option("--cc", dest="is_cc",
                  action="store_true")
parser.add_option("--rc", dest="is_rc",
                  action="store_true")
# -g switches the output from VSpU tables to generator tables.
parser.add_option("-g", dest="is_generator",
                  action="store_true")
(options, args) = parser.parse_args()
g1s = None
g2s = None
g1s_old = None
g2s_old = None
if options.is_tricli:
g1s = ( identity, )
g2s = ( False, )
if options.is_monocli:
g1s = ( rot2x, rot2y, rot2z, rot2xi, rot2yi, rot2zi )
g2s = ( False, )
if options.is_bcc:
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, BCC )
if options.is_ac:
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, AC )
if options.is_bc:
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, BC )
if options.is_cc:
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, CC )
if options.is_ortho:
g1s = ( rot2z, rot2zi )
g2s = ( rot2x, rot2xi )
if options.is_bcc:
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, BCC )
if options.is_fcc:
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, FCC )
if options.is_ac:
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, AC )
if options.is_bc:
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, BC )
if options.is_cc:
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, CC )
if options.is_tetra:
g1s = ( rot4z, rot4zi )
g2s = ( False, rot2x, rot2xi )
if options.is_bcc:
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, BCC )
# if options.is_rhombo:
# g1s = ( rot3xyz, rot3xyzi )
# g2s = ( False, hexa2_ab, trigo2ab )
if options.is_trigo:
g1s = ( rot3z, rot3zi )
g2s = ( False, hexa2_ab, trigo2ab, hexa2_abi, trigo2abi )
if options.is_rc: # hP
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, RC )
if options.is_rhombo: # hR
g1s = ( rot3z, rot3zi )
g2s = ( False, hexa2_ab, trigo2ab, hexa2_abi, trigo2abi )
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, RC )
g1s_old = None
g2s_old = None
if options.is_hexa:
g1s = ( rot6z, rot6zi )
g2s = ( False, hexa2_ab, hexa2_abi )
if options.is_cubic:
g1s = ( rot4z, rot2z, rot4zi, rot2zi )
g2s = ( rot3xyz, rot3xyzi )
if options.is_bcc:
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, BCC )
if options.is_fcc:
g1s, g2s, g1s_old, g2s_old = get_rotation_primitive( g1s, g2s, FCC )
if g1s is None:
print "Option is needed. See \'make_VSpU.py -h\'"
else:
if g1s_old is None:
VSpU_sets, generator_sets = get_VSpU_sets( g1s, g2s )
else:
VSpU_sets = get_VSpU_sets( g1s, g2s )[0]
generator_sets = get_VSpU_sets( g1s_old, g2s_old )[1]
centering = ""
if options.is_fcc:
centering = "_F"
if options.is_bcc:
centering = "_I"
if options.is_ac:
centering = "_A"
if options.is_bc:
centering = "_B"
if options.is_cc:
centering = "_C"
if options.is_generator:
if options.is_tricli:
print "static int tricli_generators[][3][9] ="
if options.is_monocli:
print "static int monocli_generators[][3][9] ="
if options.is_ortho:
print "static int ortho_generators[][3][9] ="
if options.is_tetra:
print "static int tetra_generators[][3][9] ="
if options.is_trigo:
if options.is_rc:
print "static int rhombo_h_generators[][3][9] ="
else:
print "static int trigo_generators[][3][9] ="
if options.is_rhombo:
print "static int rhombo_p_generators[][3][9] ="
if options.is_hexa:
print "static int hexa_generators[][3][9] ="
if options.is_cubic:
print "static int cubic_generators[][3][9] ="
write_generators(generator_sets)
print
else:
if options.is_tricli:
print "static double tricli_VSpU[][3][9] ="
if options.is_monocli:
print "static double monocli%s_VSpU[][3][9] =" % centering
if options.is_ortho:
print "static double ortho%s_VSpU[][3][9] =" % centering
if options.is_tetra:
print "static double tetra%s_VSpU[][3][9] =" % centering
if options.is_trigo:
if options.is_rc:
print "static double rhombo_h_VSpU[][3][9] ="
else:
print "static double trigo_VSpU[][3][9] ="
if options.is_rhombo:
print "static double rhombo_p_VSpU[][3][9] ="
if options.is_hexa:
print "static double hexa_VSpU[][3][9] ="
if options.is_cubic:
print "static double cubic%s_VSpU[][3][9] ="
write_VSpU(VSpU_sets)
print
|
PypiClean
|
/ais-libpythonpro-0.2.tar.gz/ais-libpythonpro-0.2/README.md
|
# libpythonpro
Módulo para exemplificar construção de projetos Python no curso PyTools
Versão do Python suportada: 3
[](https://pyup.io/repos/github/Aislanfagundes/libpythonpro/)
[](https://pyup.io/repos/github/Aislanfagundes/libpythonpro/)
Para instalar:
```console
python3 -m venv .venv
source .venv/bin/activate
pip install -r requirements-dev.txt
```
Para conferir qualidade do código:
```console
flake8
```
|
PypiClean
|
/pulumi_yandex_unofficial-0.1.8.tar.gz/pulumi_yandex_unofficial-0.1.8/pulumi_yandex_unofficial/get_container_repository.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = [
'GetContainerRepositoryResult',
'AwaitableGetContainerRepositoryResult',
'get_container_repository',
'get_container_repository_output',
]
@pulumi.output_type
class GetContainerRepositoryResult:
    """
    A collection of values returned by getContainerRepository.
    """
    def __init__(__self__, id=None, name=None, repository_id=None):
        # Validate and store the raw invoke outputs.  pulumi.set cooperates
        # with the @pulumi.output_type machinery, which backs the @property
        # getters below.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if repository_id and not isinstance(repository_id, str):
            raise TypeError("Expected argument 'repository_id' to be a str")
        pulumi.set(__self__, "repository_id", repository_id)
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The provider-assigned unique ID for this managed resource.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the repository."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="repositoryId")
    def repository_id(self) -> str:
        """The ID of the repository."""
        return pulumi.get(self, "repository_id")
class AwaitableGetContainerRepositoryResult(GetContainerRepositoryResult):
    """Awaitable variant of GetContainerRepositoryResult."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable ``yield`` turns this method into a generator, which
        # is what ``__await__`` must return; awaiting immediately resolves to
        # a plain GetContainerRepositoryResult copy of this object's fields.
        if False:
            yield self
        return GetContainerRepositoryResult(
            id=self.id,
            name=self.name,
            repository_id=self.repository_id)
def get_container_repository(name: Optional[str] = None,
                             repository_id: Optional[str] = None,
                             opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetContainerRepositoryResult:
    """
    Get information about a Yandex Container Repository. For more information, see
    [the official documentation](https://cloud.yandex.com/docs/container-registry/concepts/repository)
    ## Example Usage
    ```python
    import pulumi
    import pulumi_yandex as yandex
    repo_1 = yandex.get_container_repository(name="some_repository_name")
    repo_2 = yandex.get_container_repository(repository_id="some_repository_id")
    ```
    :param str name: Name of the repository. The name of the repository should start with id of a container registry and match the name of the images in the repository.
    :param str repository_id: The ID of a specific repository.
    """
    # Marshal the lookup arguments and perform the synchronous invoke.
    invoke_args = {
        'name': name,
        'repositoryId': repository_id,
    }
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    outcome = pulumi.runtime.invoke(
        'yandex:index/getContainerRepository:getContainerRepository',
        invoke_args, opts=opts, typ=GetContainerRepositoryResult).value
    return AwaitableGetContainerRepositoryResult(
        id=outcome.id,
        name=outcome.name,
        repository_id=outcome.repository_id)
@_utilities.lift_output_func(get_container_repository)
def get_container_repository_output(name: Optional[pulumi.Input[Optional[str]]] = None,
                                    repository_id: Optional[pulumi.Input[Optional[str]]] = None,
                                    opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetContainerRepositoryResult]:
    """
    Get information about a Yandex Container Repository. For more information, see
    [the official documentation](https://cloud.yandex.com/docs/container-registry/concepts/repository)
    ## Example Usage
    ```python
    import pulumi
    import pulumi_yandex as yandex
    repo_1 = yandex.get_container_repository(name="some_repository_name")
    repo_2 = yandex.get_container_repository(repository_id="some_repository_id")
    ```
    :param str name: Name of the repository. The name of the repository should start with id of a container registry and match the name of the images in the repository.
    :param str repository_id: The ID of a specific repository.
    """
    # Body intentionally empty: @_utilities.lift_output_func wraps
    # get_container_repository above and supplies the implementation,
    # lifting plain inputs into pulumi Outputs.
    ...
|
PypiClean
|
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/request/AlipayCommerceTransportTaxiOrderinfoSyncRequest.py
|
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayCommerceTransportTaxiOrderinfoSyncModel import AlipayCommerceTransportTaxiOrderinfoSyncModel
class AlipayCommerceTransportTaxiOrderinfoSyncRequest(object):
    """Request wrapper for the ``alipay.commerce.transport.taxi.orderinfo.sync`` gateway method."""

    def __init__(self, biz_model=None):
        # Per-request state; everything except the protocol version is optional.
        self._biz_model = biz_model
        self._biz_content = None
        self._version = "1.0"
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None
        self._return_url = None
        self._udf_params = None
        self._need_encrypt = False

    @property
    def biz_model(self):
        """Typed business model, serialized into the request by :meth:`get_params`."""
        return self._biz_model

    @biz_model.setter
    def biz_model(self, value):
        self._biz_model = value

    @property
    def biz_content(self):
        """Business payload; always stored as a model instance."""
        return self._biz_content

    @biz_content.setter
    def biz_content(self, value):
        # Accept either a ready model instance or a plain dict to be converted.
        if isinstance(value, AlipayCommerceTransportTaxiOrderinfoSyncModel):
            self._biz_content = value
        else:
            self._biz_content = AlipayCommerceTransportTaxiOrderinfoSyncModel.from_alipay_dict(value)

    @property
    def version(self):
        """API protocol version, defaults to "1.0"."""
        return self._version

    @version.setter
    def version(self, value):
        self._version = value

    @property
    def terminal_type(self):
        return self._terminal_type

    @terminal_type.setter
    def terminal_type(self, value):
        self._terminal_type = value

    @property
    def terminal_info(self):
        return self._terminal_info

    @terminal_info.setter
    def terminal_info(self, value):
        self._terminal_info = value

    @property
    def prod_code(self):
        return self._prod_code

    @prod_code.setter
    def prod_code(self, value):
        self._prod_code = value

    @property
    def notify_url(self):
        return self._notify_url

    @notify_url.setter
    def notify_url(self, value):
        self._notify_url = value

    @property
    def return_url(self):
        return self._return_url

    @return_url.setter
    def return_url(self, value):
        self._return_url = value

    @property
    def udf_params(self):
        """User-defined extra text parameters (dict), or None."""
        return self._udf_params

    @udf_params.setter
    def udf_params(self, value):
        # Non-dict values are silently ignored, matching SDK conventions.
        if not isinstance(value, dict):
            return
        self._udf_params = value

    @property
    def need_encrypt(self):
        return self._need_encrypt

    @need_encrypt.setter
    def need_encrypt(self, value):
        self._need_encrypt = value

    def add_other_text_param(self, key, value):
        """Attach an arbitrary extra text parameter to the request."""
        if not self.udf_params:
            self.udf_params = dict()
        self.udf_params[key] = value

    def _dump_model(self, model):
        """Serialize a model into the canonical compact JSON the gateway expects."""
        return json.dumps(obj=model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))

    def get_params(self):
        """Build the flat name/value parameter map sent to the gateway."""
        params = dict()
        params[P_METHOD] = 'alipay.commerce.transport.taxi.orderinfo.sync'
        params[P_VERSION] = self.version
        if self.biz_model:
            params[P_BIZ_CONTENT] = self._dump_model(self.biz_model)
        if self.biz_content:
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = self._dump_model(self.biz_content)
            else:
                params['biz_content'] = self.biz_content
        # Simple optional string fields are copied through only when truthy.
        for field in ('terminal_type', 'terminal_info', 'prod_code', 'notify_url', 'return_url'):
            value = getattr(self, field)
            if value:
                params[field] = value
        if self.udf_params:
            params.update(self.udf_params)
        return params

    def get_multipart_params(self):
        """This request carries no file uploads, so the multipart map is empty."""
        return dict()
|
PypiClean
|
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/storage/v20200801preview/queue.py
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['Queue']
class Queue(pulumi.CustomResource):
    """An Azure Storage queue resource (generated SDK, API version 2020-08-01-preview)."""
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_name: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 queue_name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        Create a Queue resource with the given unique name, props, and options.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] metadata: A name-value pair that represents queue metadata.
        :param pulumi.Input[str] queue_name: A queue name must be unique within a storage account and must be between 3 and 63 characters.The name must comprise of lowercase alphanumeric and dash(-) characters only, it should begin and end with an alphanumeric character and it cannot have two consecutive dash(-) characters.
        :param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
        """
        # Legacy positional-style arguments, kept only for backward compatibility.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: validate required inputs and assemble
            # the property bag handed to the engine.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()
            if account_name is None and not opts.urn:
                raise TypeError("Missing required property 'account_name'")
            __props__['account_name'] = account_name
            __props__['metadata'] = metadata
            if queue_name is None and not opts.urn:
                raise TypeError("Missing required property 'queue_name'")
            __props__['queue_name'] = queue_name
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            # Output-only properties start as None and are resolved by the engine.
            __props__['approximate_message_count'] = None
            __props__['name'] = None
            __props__['type'] = None
        # Aliases let the engine match this resource against older type tokens.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:storage:Queue"), pulumi.Alias(type_="azure-nextgen:storage/latest:Queue"), pulumi.Alias(type_="azure-nextgen:storage/v20190601:Queue")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(Queue, __self__).__init__(
            'azure-nextgen:storage/v20200801preview:Queue',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'Queue':
        """
        Get an existing Queue resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # NOTE(review): no state is rehydrated here; an empty property bag is
        # passed and values are resolved by the engine from the supplied id.
        __props__ = dict()
        return Queue(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="approximateMessageCount")
    def approximate_message_count(self) -> pulumi.Output[int]:
        """
        Integer indicating an approximate number of messages in the queue. This number is not lower than the actual number of messages in the queue, but could be higher.
        """
        return pulumi.get(self, "approximate_message_count")
    @property
    @pulumi.getter
    def metadata(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A name-value pair that represents queue metadata.
        """
        return pulumi.get(self, "metadata")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")
    def translate_output_property(self, prop):
        # Map engine camelCase property names back to snake_case attributes.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
    def translate_input_property(self, prop):
        # Map snake_case inputs to the camelCase names the provider expects.
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
|
PypiClean
|
/sumotools-0.7.2.tar.gz/sumotools-0.7.2/traci/_simulation.py
|
# @file _simulation.py
# @author Daniel Krajzewicz
# @author Jakob Erdmann
# @author Michael Behrisch
# @date 2011-03-15
# @version $Id$
from __future__ import absolute_import
import struct
import collections
from . import constants as tc
from .domain import Domain
from .storage import Storage
# One leg of a (possibly intermodal) route as reported by SUMO.
Stage = collections.namedtuple('Stage', ['stageType', 'line', 'destStop', 'edges', 'travelTime', 'cost'])


def _readStage(result):
    """Decode a single Stage compound from the response storage *result*.

    The wire layout is: compound header (size, type, stageType), then three
    type-byte-prefixed fields (line, destStop, edges) and finally two typed
    doubles (travelTime, cost).
    """
    compound_size, compound_type, stage_type = result.read("!iBi")
    result.read("!B")  # type byte preceding the line id
    line_id = result.readString()
    result.read("!B")  # type byte preceding the destination stop
    dest_stop = result.readString()
    result.read("!B")  # type byte preceding the edge list
    edge_list = result.readStringList()
    type_tt, travel_time, type_cost, route_cost = result.read("!BdBd")
    return Stage(stage_type, line_id, dest_stop, edge_list, travel_time, route_cost)
# Maps each simulation variable id to the Storage reader method used to
# decode its value from a TraCI response.
_RETURN_VALUE_FUNC = {tc.VAR_TIME_STEP: Storage.readInt,
                      tc.VAR_LOADED_VEHICLES_NUMBER: Storage.readInt,
                      tc.VAR_LOADED_VEHICLES_IDS: Storage.readStringList,
                      tc.VAR_DEPARTED_VEHICLES_NUMBER: Storage.readInt,
                      tc.VAR_DEPARTED_VEHICLES_IDS: Storage.readStringList,
                      tc.VAR_ARRIVED_VEHICLES_NUMBER: Storage.readInt,
                      tc.VAR_ARRIVED_VEHICLES_IDS: Storage.readStringList,
                      tc.VAR_PARKING_STARTING_VEHICLES_NUMBER: Storage.readInt,
                      tc.VAR_PARKING_STARTING_VEHICLES_IDS: Storage.readStringList,
                      tc.VAR_PARKING_ENDING_VEHICLES_NUMBER: Storage.readInt,
                      tc.VAR_PARKING_ENDING_VEHICLES_IDS: Storage.readStringList,
                      tc.VAR_STOP_STARTING_VEHICLES_NUMBER: Storage.readInt,
                      tc.VAR_STOP_STARTING_VEHICLES_IDS: Storage.readStringList,
                      tc.VAR_STOP_ENDING_VEHICLES_NUMBER: Storage.readInt,
                      tc.VAR_STOP_ENDING_VEHICLES_IDS: Storage.readStringList,
                      tc.VAR_MIN_EXPECTED_VEHICLES: Storage.readInt,
                      tc.VAR_BUS_STOP_WAITING: Storage.readInt,
                      tc.VAR_TELEPORT_STARTING_VEHICLES_NUMBER: Storage.readInt,
                      tc.VAR_TELEPORT_STARTING_VEHICLES_IDS: Storage.readStringList,
                      tc.VAR_TELEPORT_ENDING_VEHICLES_NUMBER: Storage.readInt,
                      tc.VAR_TELEPORT_ENDING_VEHICLES_IDS: Storage.readStringList,
                      tc.VAR_DELTA_T: Storage.readInt,
                      # The network bounding box arrives as two coordinate pairs.
                      tc.VAR_NET_BOUNDING_BOX: lambda result: (result.read("!dd"), result.read("!dd"))}
class SimulationDomain(Domain):
def __init__(self):
Domain.__init__(self, "simulation", tc.CMD_GET_SIM_VARIABLE, tc.CMD_SET_SIM_VARIABLE,
tc.CMD_SUBSCRIBE_SIM_VARIABLE, tc.RESPONSE_SUBSCRIBE_SIM_VARIABLE,
tc.CMD_SUBSCRIBE_SIM_CONTEXT, tc.RESPONSE_SUBSCRIBE_SIM_CONTEXT,
_RETURN_VALUE_FUNC)
def getCurrentTime(self):
"""getCurrentTime() -> integer
Returns the current simulation time in ms.
"""
return self._getUniversal(tc.VAR_TIME_STEP)
def getLoadedNumber(self):
"""getLoadedNumber() -> integer
Returns the number of vehicles which were loaded in this time step.
"""
return self._getUniversal(tc.VAR_LOADED_VEHICLES_NUMBER)
def getLoadedIDList(self):
"""getLoadedIDList() -> list(string)
Returns a list of ids of vehicles which were loaded in this time step.
"""
return self._getUniversal(tc.VAR_LOADED_VEHICLES_IDS)
def getDepartedNumber(self):
"""getDepartedNumber() -> integer
Returns the number of vehicles which departed (were inserted into the road network) in this time step.
"""
return self._getUniversal(tc.VAR_DEPARTED_VEHICLES_NUMBER)
def getDepartedIDList(self):
"""getDepartedIDList() -> list(string)
Returns a list of ids of vehicles which departed (were inserted into the road network) in this time step.
"""
return self._getUniversal(tc.VAR_DEPARTED_VEHICLES_IDS)
def getArrivedNumber(self):
"""getArrivedNumber() -> integer
Returns the number of vehicles which arrived (have reached their destination and are removed from the road network) in this time step.
"""
return self._getUniversal(tc.VAR_ARRIVED_VEHICLES_NUMBER)
def getArrivedIDList(self):
"""getArrivedIDList() -> list(string)
Returns a list of ids of vehicles which arrived (have reached their destination and are removed from the road network) in this time step.
"""
return self._getUniversal(tc.VAR_ARRIVED_VEHICLES_IDS)
def getParkingStartingVehiclesNumber(self):
"""getParkingStartingVehiclesNumber() -> integer
.
"""
return self._getUniversal(tc.VAR_PARKING_STARTING_VEHICLES_NUMBER)
def getParkingStartingVehiclesIDList(self):
"""getParkingStartingVehiclesIDList() -> list(string)
.
"""
return self._getUniversal(tc.VAR_PARKING_STARTING_VEHICLES_IDS)
def getParkingEndingVehiclesNumber(self):
"""getParkingEndingVehiclesNumber() -> integer
.
"""
return self._getUniversal(tc.VAR_PARKING_ENDING_VEHICLES_NUMBER)
def getParkingEndingVehiclesIDList(self):
"""getParkingEndingVehiclesIDList() -> list(string)
.
"""
return self._getUniversal(tc.VAR_PARKING_ENDING_VEHICLES_IDS)
def getStopStartingVehiclesNumber(self):
"""getStopStartingVehiclesNumber() -> integer
.
"""
return self._getUniversal(tc.VAR_STOP_STARTING_VEHICLES_NUMBER)
def getStopStartingVehiclesIDList(self):
"""getStopStartingVehiclesIDList() -> list(string)
.
"""
return self._getUniversal(tc.VAR_STOP_STARTING_VEHICLES_IDS)
def getStopEndingVehiclesNumber(self):
"""getStopEndingVehiclesNumber() -> integer
.
"""
return self._getUniversal(tc.VAR_STOP_ENDING_VEHICLES_NUMBER)
def getStopEndingVehiclesIDList(self):
"""getStopEndingVehiclesIDList() -> list(string)
.
"""
return self._getUniversal(tc.VAR_STOP_ENDING_VEHICLES_IDS)
def getMinExpectedNumber(self):
"""getMinExpectedNumber() -> integer
Returns the number of vehicles which are in the net plus the
ones still waiting to start. This number may be smaller than
the actual number of vehicles still to come because of delayed
route file parsing. If the number is 0 however, it is
guaranteed that all route files have been parsed completely
and all vehicles have left the network.
"""
return self._getUniversal(tc.VAR_MIN_EXPECTED_VEHICLES)
def getBusStopWaiting(self, stopID):
"""getBusStopWaiting() -> integer
Get the total number of waiting persons at the named bus stop.
"""
return self._getUniversal(tc.VAR_BUS_STOP_WAITING, stopID)
def getStartingTeleportNumber(self):
"""getStartingTeleportNumber() -> integer
Returns the number of vehicles which started to teleport in this time step.
"""
return self._getUniversal(tc.VAR_TELEPORT_STARTING_VEHICLES_NUMBER)
def getStartingTeleportIDList(self):
"""getStartingTeleportIDList() -> list(string)
Returns a list of ids of vehicles which started to teleport in this time step.
"""
return self._getUniversal(tc.VAR_TELEPORT_STARTING_VEHICLES_IDS)
def getEndingTeleportNumber(self):
"""getEndingTeleportNumber() -> integer
Returns the number of vehicles which ended to be teleported in this time step.
"""
return self._getUniversal(tc.VAR_TELEPORT_ENDING_VEHICLES_NUMBER)
def getEndingTeleportIDList(self):
"""getEndingTeleportIDList() -> list(string)
Returns a list of ids of vehicles which ended to be teleported in this time step.
"""
return self._getUniversal(tc.VAR_TELEPORT_ENDING_VEHICLES_IDS)
def getDeltaT(self):
"""getDeltaT() -> integer
Returns the length of one simulation step in milliseconds
"""
return self._getUniversal(tc.VAR_DELTA_T)
def getNetBoundary(self):
"""getNetBoundary() -> ((double, double), (double, double))
The boundary box of the simulation network.
"""
return self._getUniversal(tc.VAR_NET_BOUNDING_BOX)
def convert2D(self, edgeID, pos, laneIndex=0, toGeo=False):
posType = tc.POSITION_2D
if toGeo:
posType = tc.POSITION_LON_LAT
self._connection._beginMessage(tc.CMD_GET_SIM_VARIABLE, tc.POSITION_CONVERSION,
"", 1 + 4 + 1 + 4 + len(edgeID) + 8 + 1 + 1 + 1)
self._connection._string += struct.pack("!Bi", tc.TYPE_COMPOUND, 2)
self._connection._packString(edgeID, tc.POSITION_ROADMAP)
self._connection._string += struct.pack("!dBBB",
pos, laneIndex, tc.TYPE_UBYTE, posType)
return self._connection._checkResult(tc.CMD_GET_SIM_VARIABLE, tc.POSITION_CONVERSION, "").read("!dd")
def convert3D(self, edgeID, pos, laneIndex=0, toGeo=False):
posType = tc.POSITION_3D
if toGeo:
posType = tc.POSITION_LON_LAT_ALT
self._connection._beginMessage(tc.CMD_GET_SIM_VARIABLE, tc.POSITION_CONVERSION,
"", 1 + 4 + 1 + 4 + len(edgeID) + 8 + 1 + 1 + 1)
self._connection._string += struct.pack("!Bi", tc.TYPE_COMPOUND, 2)
self._connection._packString(edgeID, tc.POSITION_ROADMAP)
self._connection._string += struct.pack("!dBBB",
pos, laneIndex, tc.TYPE_UBYTE, posType)
return self._connection._checkResult(tc.CMD_GET_SIM_VARIABLE, tc.POSITION_CONVERSION, "").read("!ddd")
def convertRoad(self, x, y, isGeo=False):
posType = tc.POSITION_2D
if isGeo:
posType = tc.POSITION_LON_LAT
self._connection._beginMessage(
tc.CMD_GET_SIM_VARIABLE, tc.POSITION_CONVERSION, "", 1 + 4 + 1 + 8 + 8 + 1 + 1)
self._connection._string += struct.pack("!Bi", tc.TYPE_COMPOUND, 2)
self._connection._string += struct.pack("!Bdd", posType, x, y)
self._connection._string += struct.pack("!BB",
tc.TYPE_UBYTE, tc.POSITION_ROADMAP)
result = self._connection._checkResult(
tc.CMD_GET_SIM_VARIABLE, tc.POSITION_CONVERSION, "")
return result.readString(), result.readDouble(), result.read("!B")[0]
def convertGeo(self, x, y, fromGeo=False):
fromType = tc.POSITION_2D
toType = tc.POSITION_LON_LAT
if fromGeo:
fromType = tc.POSITION_LON_LAT
toType = tc.POSITION_2D
self._connection._beginMessage(
tc.CMD_GET_SIM_VARIABLE, tc.POSITION_CONVERSION, "", 1 + 4 + 1 + 8 + 8 + 1 + 1)
self._connection._string += struct.pack("!Bi", tc.TYPE_COMPOUND, 2)
self._connection._string += struct.pack("!Bdd", fromType, x, y)
self._connection._string += struct.pack("!BB", tc.TYPE_UBYTE, toType)
return self._connection._checkResult(tc.CMD_GET_SIM_VARIABLE, tc.POSITION_CONVERSION, "").read("!dd")
def getDistance2D(self, x1, y1, x2, y2, isGeo=False, isDriving=False):
"""getDistance2D(double, double, double, double, boolean, boolean) -> double
Returns the distance between the two coordinate pairs (x1,y1) and (x2,y2)
If isGeo=True, coordinates are interpreted as longitude and latitude rather
than cartesian coordinates in meters.
If isDriving=True, the coordinates are mapped onto the road network and the
length of the shortest route in the network is returned. Otherwise, the
straight-line distance is returned.
"""
posType = tc.POSITION_2D
if isGeo:
posType = tc.POSITION_LON_LAT
distType = tc.REQUEST_AIRDIST
if isDriving:
distType = tc.REQUEST_DRIVINGDIST
self._connection._beginMessage(
tc.CMD_GET_SIM_VARIABLE, tc.DISTANCE_REQUEST, "", 1 + 4 + 1 + 8 + 8 + 1 + 8 + 8 + 1)
self._connection._string += struct.pack("!Bi", tc.TYPE_COMPOUND, 3)
self._connection._string += struct.pack("!Bdd", posType, x1, y1)
self._connection._string += struct.pack(
"!BddB", posType, x2, y2, distType)
return self._connection._checkResult(tc.CMD_GET_SIM_VARIABLE, tc.DISTANCE_REQUEST, "").readDouble()
def getDistanceRoad(self, edgeID1, pos1, edgeID2, pos2, isDriving=False):
"""getDistanceRoad(string, double, string, double, boolean) -> double
Reads two positions on the road network and an indicator whether the air or the driving distance shall be computed. Returns the according distance.
"""
distType = tc.REQUEST_AIRDIST
if isDriving:
distType = tc.REQUEST_DRIVINGDIST
self._connection._beginMessage(tc.CMD_GET_SIM_VARIABLE, tc.DISTANCE_REQUEST, "",
1 + 4 + 1 + 4 + len(edgeID1) + 8 + 1 + 1 + 4 + len(edgeID2) + 8 + 1 + 1)
self._connection._string += struct.pack("!Bi", tc.TYPE_COMPOUND, 3)
self._connection._packString(edgeID1, tc.POSITION_ROADMAP)
self._connection._string += struct.pack("!dB", pos1, 0)
self._connection._packString(edgeID2, tc.POSITION_ROADMAP)
self._connection._string += struct.pack("!dBB", pos2, 0, distType)
return self._connection._checkResult(tc.CMD_GET_SIM_VARIABLE, tc.DISTANCE_REQUEST, "").readDouble()
def findRoute(self, fromEdge, toEdge, vtype="", depart=-1., routingMode=0):
self._connection._beginMessage(tc.CMD_GET_SIM_VARIABLE, tc.FIND_ROUTE, "",
1 + 4 + 1 + 4 + len(fromEdge) + 1 + 4 + len(toEdge) + 1 + 4 + len(vtype) + 1 + 8 + 1 + 4)
self._connection._string += struct.pack("!Bi", tc.TYPE_COMPOUND, 5)
self._connection._packString(fromEdge)
self._connection._packString(toEdge)
self._connection._packString(vtype)
self._connection._string += struct.pack("!BdBi", tc.TYPE_DOUBLE, depart, tc.TYPE_INTEGER, routingMode)
return _readStage(self._connection._checkResult(tc.CMD_GET_SIM_VARIABLE, tc.FIND_ROUTE, ""))
def findIntermodalRoute(self, fromEdge, toEdge, modes="", depart=-1., routingMode=0, speed=-1., walkFactor=-1., departPos=-1., arrivalPos=-1., departPosLat=-1., ptype="", vtype=""):
self._connection._beginMessage(tc.CMD_GET_SIM_VARIABLE, tc.FIND_INTERMODAL_ROUTE, "",
1 + 4 + 1 + 4 + len(fromEdge) + 1 + 4 + len(toEdge) + 1 + 4 + len(modes) + 1 + 8 + 1 + 4 + 1 + 8 + 1 + 8 + 1 + 8 + 1 + 8 + 1 + 8 + 1 + 4 + len(ptype) + 1 + 4 + len(vtype))
self._connection._string += struct.pack("!Bi", tc.TYPE_COMPOUND, 12)
self._connection._packString(fromEdge)
self._connection._packString(toEdge)
self._connection._packString(modes)
self._connection._string += struct.pack("!BdBi", tc.TYPE_DOUBLE, depart, tc.TYPE_INTEGER, routingMode)
self._connection._string += struct.pack("!BdBd", tc.TYPE_DOUBLE, speed, tc.TYPE_DOUBLE, walkFactor)
self._connection._string += struct.pack("!BdBdBd", tc.TYPE_DOUBLE, departPos, tc.TYPE_DOUBLE, arrivalPos, tc.TYPE_DOUBLE, departPosLat)
self._connection._packString(ptype)
self._connection._packString(vtype)
answer = self._connection._checkResult(tc.CMD_GET_SIM_VARIABLE, tc.FIND_INTERMODAL_ROUTE, "")
result = []
for c in range(answer.readInt()):
answer.read("!B") # Type
result.append(_readStage(answer))
return result
def clearPending(self, routeID=""):
self._connection._beginMessage(tc.CMD_SET_SIM_VARIABLE, tc.CMD_CLEAR_PENDING_VEHICLES, "",
1 + 4 + len(routeID))
self._connection._packString(routeID)
self._connection._sendExact()
def saveState(self, fileName):
self._connection._beginMessage(tc.CMD_SET_SIM_VARIABLE, tc.CMD_SAVE_SIMSTATE, "",
1 + 4 + len(fileName))
self._connection._packString(fileName)
self._connection._sendExact()
def subscribe(self, varIDs=(tc.VAR_DEPARTED_VEHICLES_IDS,), begin=0, end=2**31 - 1):
"""subscribe(list(integer), double, double) -> None
Subscribe to one or more simulation values for the given interval.
"""
Domain.subscribe(self, "", varIDs, begin, end)
def getSubscriptionResults(self):
    """getSubscriptionResults() -> dict(integer: <value_type>)

    Returns the subscription results for the last time step.
    It is not possible to retrieve older subscription results than the ones
    from the last time step.
    """
    # Delegates to the base Domain with the empty (global) object ID.
    return Domain.getSubscriptionResults(self, "")
# Instantiate the domain once at import time; presumably the Domain base
# class registers the instance with the traci machinery -- TODO confirm.
SimulationDomain()
|
PypiClean
|
/sierra-research-1.3.5.tar.gz/sierra-research-1.3.5/README.rst
|
===========================================================================
SIERRA (reSearch pIpEline for Reproducibility, Reusability, and Automation)
===========================================================================
.. |pepy-downloads| image:: https://pepy.tech/badge/sierra-research
:target: https://pepy.tech/project/sierra-research
.. |pypi-version| image:: https://img.shields.io/pypi/v/sierra-research.svg
:target: https://pypi.python.org/pypi/sierra-research/
.. |supported-pythons| image:: https://img.shields.io/pypi/pyversions/sierra-research.svg
:target: https://pypi.python.org/pypi/sierra-research/
.. |linux-supported| image:: https://svgshare.com/i/Zhy.svg
.. |osx-supported| image:: https://svgshare.com/i/ZjP.svg
.. |ci-integration-master| image:: https://github.com/jharwell/sierra/actions/workflows/integration-all.yml/badge.svg?branch=master
.. |ci-analysis-master| image:: https://github.com/jharwell/sierra/actions/workflows/static-analysis.yml/badge.svg?branch=master
.. |ci-coverage-master| image:: https://coveralls.io/repos/github/jharwell/sierra/badge.svg?branch=master
.. |ci-integration-devel| image:: https://github.com/jharwell/sierra/actions/workflows/integration-all.yml/badge.svg?branch=devel
.. |ci-analysis-devel| image:: https://github.com/jharwell/sierra/actions/workflows/static-analysis.yml/badge.svg?branch=devel
.. |ci-coverage-devel| image:: https://coveralls.io/repos/github/jharwell/sierra/badge.svg?branch=devel
.. |license| image:: https://img.shields.io/badge/License-MIT-blue.svg
.. |doi| image:: https://zenodo.org/badge/125774567.svg
:target: https://zenodo.org/badge/latestdoi/125774567
.. |docs| image:: https://readthedocs.org/projects/sierra/badge/?version=master
:target: https://sierra.readthedocs.io/en/master/
.. |maintenance| image:: https://img.shields.io/badge/Maintained%3F-yes-green.svg
:target: https://gitHub.com/jharwell/sierra/graphs/commit-activity
:Usage:
|pepy-downloads| |pypi-version| |supported-pythons| |linux-supported|
|osx-supported|
:Release:
|ci-analysis-master| |ci-integration-master| |ci-coverage-master|
:Development:
|ci-analysis-devel| |ci-integration-devel| |ci-coverage-devel|
:Misc:
|license| |doi| |docs| |maintenance|
TL;DR
=====
What is SIERRA? See `What is SIERRA?`_
Why should you use SIERRA? See `Why SIERRA?`_
To install SIERRA (requires python 3.8+):
::
pip3 install sierra-research
To get started using SIERRA, see `getting started
<https://sierra.readthedocs.io/en/master/src/getting_started.html>`_.
Want to cite SIERRA? See `Citing`_.
Have an issue using SIERRA? See `Troubleshooting`_.
What is SIERRA?
===============
.. figure:: https://raw.githubusercontent.com/jharwell/sierra/master/docs/figures/architecture.png
SIERRA architecture, organized by pipeline stage. Stages are listed left to
right, and an approximate joint architectural/functional stack is top to
bottom for each stage. “...” indicates areas where SIERRA is designed via
plugins to be easily extensible. “Host machine” indicates the machine SIERRA
was invoked on.
SIERRA is a command line tool and plugin framework for:
- Automating scientific research, providing facilities for seamless experiment
generation, execution, and results processing.
- Accelerating research cycles by allowing researchers to focus on the “science”
aspects: developing new things and designing experiments to test them.
- Improving the reproducibility of scientific research, particularly in AI.
Why SIERRA?
===========
- SIERRA changes the paradigm of the engineering tasks researchers must perform
from manual and procedural to declarative and automated. That is, from::
"I need to perform these steps to run the experiment, process the data and
generate the graphs I want."
to::
"OK SIERRA: Here is the environment and simulator/robot platform I want to
use, the deliverables I want to generate, and the data I want to appear on
them for my research query--GO!"
Essentially, SIERRA handles the “engineering” parts of research on the
backend, such as: generating experiments, configuring execution environments
or platforms, running the generated experiments, and processing experimental
results to generate statistics, and/or visualizations. It also handles random
seeds, algorithm stochasticity, and other low-level details.
- It eliminates manual reconfiguration of experiments across simulator/robot
platforms by decoupling the concepts of execution environment and platform;
any supported pair can be selected in a mix-and-match fashion (see `SIERRA
Support Matrix`_). Thus, it removes the need for throw-away scripts for data
processing and deliverable generation.
- SIERRA can be used with code written in any language; only bindings must be
written in python.
- SIERRA has a rich model framework allowing you to run arbitrary models,
generate data, and plot it on the same figure as empirical results,
automatically.
- Its deeply modular architecture makes it easy to customize for the needs
of a specific research project.
Not sure if SIERRA makes sense for your research? Consider the following use
cases:
- `Use Case #1: Alice The Robotics Researcher`_
- `Use Case #2: Alice The Contagion Modeler`_
If aspects of either use case sound familiar, then there is a strong chance
SIERRA could help you! SIERRA is well documented--see the `SIERRA docs
<https://sierra.readthedocs.io/en/master/>`_ to get started.
Use Case #1: Alice The Robotics Researcher
------------------------------------------
Alice is a researcher at a large university that has developed a new distributed
task allocation algorithm ``$\alpha$`` for use in a foraging task where
robots must coordinate to find objects of interest in an unknown environment and
bring them to a central location. Alice wants to implement her algorithm so she
can investigate:
- How well it scales with the number of robots, specifically if it remains
efficient with up to 1000 robots in several different scenarios.
- How robust it is with respect to sensor and actuator noise.
- How it compares to other similar state of the art algorithms on a foraging
task: ``$\beta,\gamma$``.
Alice is faced with the following heterogeneity matrix which she has to deal
with to answer her research queries, *in addition to the technical challenges of
the AI elements themselves*:
.. list-table::
:header-rows: 1
:widths: 25,25,25
* - Algorithm
- Contains stochasticity?
- Outputs data in?
* - ``$\alpha$``
- Yes
- CSV, rosbag
* - ``$\beta$``
- Yes
- CSV, rosbag
* - ``$\gamma$``
- No
- rosbag
Alice is familiar with ROS, and wants to use it with large scale simulated and
small scale real-robot experiments with TurtleBots. However, for real robots she
is unsure what data she will ultimately need, and wants to capture all ROS
messages, to avoid having to redo experiments later. She has access to a large
SLURM-managed cluster, and prefers to develop code on her laptop.
Use Case #2: Alice The Contagion Modeler
----------------------------------------
Alice has teamed with Bob, a biologist, to model the spread of contagion among
agents in a population, and how that affects their individual and collective
abilities to do tasks. She believes her ``$\alpha$`` algorithm can be reused
in this context. However, Bob is not convinced and has selected several
multi-agent models from recent papers: ``$\delta,\epsilon$``, and wants
Alice to compare ``$\alpha$`` to them. ``$\delta$`` was originally
developed in NetLogo, for modeling disease transmission in
animals. ``$\epsilon$`` was originally developed for ARGoS to model the
effects of radiation on robots.
Alice is faced with the following heterogeneity matrix which she must deal
with to answer her research query, *in addition to the technical challenges of
the AI elements themselves*:
.. list-table::
:header-rows: 1
:widths: 25,25,25
* - Algorithm
- Can Run On?
- Input Requirements?
* - ``$\alpha$``
- ROS/Gazebo
- XML
* - ``$\delta$``
- NetLogo
- NetLogo
* - ``$\epsilon$``
- ARGoS
- XML
Bob is interested in how the rate of contagion spread varies with agent velocity
and population size. Bob needs to prepare succinct, comprehensive visual
representations of the results of these research queries for a presentation,
including visual comparisons of the multi-agent model as it runs for each
algorithm. He will give Alice a range of parameter values to test for each
algorithm based on his ecological knowledge, and rely on Alice to perform the
experiments. For this project, Alice does not have access to HPC resources, but
does have a handful of servers in her lab which she can use.
SIERRA Support Matrix
=====================
SIERRA supports multiple `platforms
<https://sierra.readthedocs.io/en/master/src/platform/index.html>`_ which
researchers can write code to target. In SIERRA terminology, a platform is a
"thing" (usually a simulator or robot) that you want to write to code to run
on. Note that platform != OS, in SIERRA terminology. If a SIERRA platform runs
on a given OS, then SIERRA supports doing so; if it does not, then SIERRA does
not. For example, SIERRA does not support running ARGoS on windows, because
ARGoS does not support windows.
SIERRA supports multiple execution environments for execution of experiments,
such as `High Performance Computing (HPC) environments
<https://sierra.readthedocs.io/en/master/src/exec_env/hpc.html>`_ and `real
robots <https://sierra.readthedocs.io/en/master/src/exec_env/robots.html>`_.
Which execution environment experiments targeting a given platform can run in
is (somewhat) independent of the platform itself (see below).
SIERRA also supports multiple output formats for experimental outputs, as shown
below. SIERRA currently only supports XML experimental inputs.
SIERRA supports (mostly) mix-and-match between platforms, execution
environments, experiment input/output formats as shown in its support matrix
below. This is one of the most powerful features of SIERRA! If your desired
platform/execution environment is not listed, see the `plugin tutorials
<https://sierra.readthedocs.io/en/master/src/tutorials.html>`_ for how to add
it via a plugin.
.. list-table::
:header-rows: 1
:widths: 25,25,25,25
* - Execution Environment
- Platform
- Experimental Input Format
- Experimental Output Format
* - `SLURM <https://slurm.schedmd.com/documentation.html>`_: An HPC cluster
managed by the SLURM scheduler.
- ARGoS, ROS1+Gazebo
- XML
- CSV, PNG
* - `Torque/MOAB
<https://adaptivecomputing.com/cherry-services/torque-resource-manager>`_:
An HPC cluster managed by the Torque/MOAB scheduler.
- ARGoS, ROS1+Gazebo
- XML
- CSV, PNG
* - ADHOC: A miscellaneous collection of networked HPC compute nodes or
random servers; not managed by a scheduler.
- ARGoS, ROS1+Gazebo
- XML
- CSV, PNG
* - Local: The SIERRA host machine, e.g., a researcher's laptop.
- ARGoS, ROS1+Gazebo
- XML
- CSV, PNG
* - ROS1+Turtlebot3: `Turtlebot3
<https://emanual.robotis.com/docs/en/platform/turtlebot3/overview>`_
robots with ROS1.
- ROS1+Gazebo, ROS1+robot
- XML
- CSV, PNG
For more details about the platforms and experimental output formats, see below.
.. list-table::
:header-rows: 1
:widths: 50,50
* - Platform
- Description
* - `ARGoS <https://www.argos-sim.info/index.php>`_
- Simulator for fast simulation of large swarms. Requires ARGoS >=
3.0.0-beta59.
* - `ROS1 <https://ros.org>`_ + `Gazebo <https://www.gazebosim.org>`_
- Using ROS1 with the Gazebo simulator. Requires Gazebo >= 11.9.0, ROS1
Noetic or later.
* - `ROS1+Robot <https://ros.org>`_
- Using ROS1 with a real robot platform of your choice. ROS1 Noetic or
later is required.
.. list-table::
:header-rows: 1
:widths: 50,50
* - Experimental Output Format
- Scope
* - CSV file
- Raw experimental outputs, transforming into heatmap images.
* - PNG file
- Stitching images together into videos.
Requirements To Use SIERRA
==========================
The basic requirements are:
- Recent OSX (tested with 12+) or Linux (tested with ubuntu 20.04+).
- python >= 3.8.
.. NOTE:: Windows is not supported currently. Not because it can't be supported,
but because there are not currently any platform plugins which
work on windows. That is, SIERRA's OS support is dictated by the OS
support of its current platform plugins, none of which support
windows.
If windows support would be helpful for your intended usage of
SIERRA, please get in touch with me--SIERRA is written in pure
python and can definitely be made to work on windows.
For more details, including the requirements for researcher code, see the
`SIERRA requirements
<https://sierra.readthedocs.io/en/master/src/requirements.html>`_.
Citing
======
If you use SIERRA and have found it helpful, please cite the following paper::
@inproceedings{Harwell2022a-SIERRA,
author = {Harwell, John and Lowmanstone, London and Gini, Maria},
title = {SIERRA: A Modular Framework for Research Automation},
year = {2022},
isbn = {9781450392136},
publisher = {International Foundation for Autonomous Agents and Multiagent Systems},
booktitle = {Proceedings of the 21st International Conference on Autonomous Agents and Multiagent Systems},
pages = {1905–1907}
}
You can also cite the specific version of SIERRA used with the DOI at the top of
this page, to help facilitate reproducibility.
Troubleshooting
===============
If you have problems using SIERRA, please open an issue or post in the Github
forum and I'll be happy to help you work through it.
Contributing
============
I welcome all types of contributions, no matter how large or how small, and if
you have an idea, I'm happy to talk about it at any point :-). See `here
<https://sierra.readthedocs.io/en/master/src/contributing.html>`_
for the general procedure.
|
PypiClean
|
/datalad_deprecated-0.3.0-py3-none-any.whl/datalad_deprecated/metadata/extractors/datalad_rfc822.py
|
import logging
lgr = logging.getLogger('datalad.metadata.extractors.datalad_rfc822')
from os.path import exists
import email
import email.parser # necessary on Python 2.7.6 (trusty)
from os.path import join as opj
from datalad.interface.base import dedent_docstring
from .base import BaseMetadataExtractor
def _split_list_field(content):
return [i.strip() for i in content.split(',') if i.strip()]
def _beautify_multiline_field(content):
    """Turn a multiline field value into a (title, body) pair.

    The first line of the dedented content becomes the title.  When more
    lines follow, they are joined into a single body string in which a
    line consisting of a sole '.' is converted into a newline (paragraph
    break -- presumably the Debian control-file convention; TODO confirm).
    With a single line, the body equals the title.
    """
    content = dedent_docstring(content)
    lines = content.split('\n')
    title = lines[0] if lines else ''
    if len(lines) > 1:
        body = ''
        for raw_line in lines[1:]:
            stripped = raw_line.strip()
            if stripped == '.':
                # paragraph break marker
                body += '\n'
            else:
                # join with a space unless at the start of the body or
                # directly after a paragraph break
                if body and body[-1] != '\n':
                    body += ' '
                body += stripped
        content = body
    return title, content
class MetadataExtractor(BaseMetadataExtractor):
    """Extract dataset-level metadata from an RFC822-style file.

    Reads ``.datalad/meta.rfc822`` from the dataset root and maps its
    header fields onto standardized metadata keys.
    """

    # URL identifying the metadata specification this extractor conforms to
    _metadata_compliance = "http://docs.datalad.org/metadata.html#v0-1"
    # path of the metadata file, relative to the dataset root
    _core_metadata_filename = opj('.datalad', 'meta.rfc822')

    # maps RFC822 header names to standardized metadata keys;
    # a None value means the term receives special handling below
    _key2stdkey = {
        'name': 'name',
        'license': 'license',
        'author': 'author',
        'maintainer': 'maintainer',
        'audience': 'audience',
        'homepage': 'homepage',
        'version': 'version',
        'funding': 'fundedby',
        'issue-tracker': 'issuetracker',
        'cite-as': 'citation',
        'doi': 'sameas',
        'description': None,
    }

    def _get_dataset_metadata(self):
        """Parse the metadata file and return a dict of dataset metadata.

        Returns an empty dict when the metadata file does not exist.
        """
        meta = {}
        metafile = opj(self.ds.path, self._core_metadata_filename)
        if not exists(metafile):
            return meta
        # BUG FIX: the file object was previously opened without ever being
        # closed; use a context manager for deterministic cleanup.
        with open(metafile) as fp:
            spec = email.parser.Parser().parse(fp, headersonly=True)
        for term in self._key2stdkey:
            if term not in spec:
                continue
            hkey = self._key2stdkey[term]
            content = spec[term]
            if term == 'description':
                # first line becomes the short description, the remainder
                # the long one
                short, long = _beautify_multiline_field(content)
                meta['shortdescription'] = short
                meta['description'] = long
            elif term == 'license':
                # TODO if title looks like a URL, use it as @id
                label, desc = _beautify_multiline_field(content)
                if label:
                    meta[hkey] = [label, desc]
                else:
                    meta[hkey] = desc
            elif term in ('maintainer', 'author'):
                meta[hkey] = _split_list_field(content)
            elif term == 'doi':
                # expand a bare DOI into a resolvable URL
                meta[hkey] = 'http://dx.doi.org/{}'.format(content)
            else:
                meta[hkey] = content
        meta['conformsto'] = self._metadata_compliance
        return meta

    def _get_content_metadata(self):
        """This extractor provides no per-file (content) metadata."""
        return []
|
PypiClean
|
/genx3-3.6.22.tar.gz/genx3-3.6.22/genx/levenberg_marquardt.py
|
import pickle
import _thread
from dataclasses import dataclass
from numpy import *
from scipy.optimize import leastsq
from .exceptions import ErrorBarError, OptimizerInterrupted
from .core.config import BaseConfig
from .core.custom_logging import iprint
from .model import Model
from .solver_basis import GenxOptimizer, GenxOptimizerCallback, SolverParameterInfo, SolverResultInfo, SolverUpdateInfo
class LMDefaultCallbacks(GenxOptimizerCallback):
    """Default no-op callback set used when no observer is attached.

    Only text output is acted upon (printed via iprint and flushed);
    all other notifications are silently ignored.
    """
    def text_output(self, text):
        # BUG FIX: this module never imports ``sys`` at the top level, so the
        # original sys.stdout.flush() raised NameError when called; import it
        # locally to keep the fix self-contained.
        import sys
        iprint(text)
        sys.stdout.flush()

    def plot_output(self, update_data):
        pass

    def parameter_output(self, param_info):
        pass

    def fitting_ended(self, result_data):
        pass

    def autosave(self):
        pass
@dataclass
class LMConfig(BaseConfig):
    # Configuration section name consumed by the BaseConfig machinery.
    # NOTE: these assignments carry no type annotations, so @dataclass does
    # NOT treat them as fields; they remain plain class attributes.
    section='solver'
    # NOTE(review): mutable class attribute shared by all instances -- looks
    # intentional (no solver-specific option groups), but confirm against
    # BaseConfig's expectations.
    groups={}
class LMOptimizer(GenxOptimizer):
    '''
    Optimizer based on Levenberg-Marquardt algorithm.

    Wraps scipy.optimize.leastsq and runs it in a background thread;
    progress and results are reported through a GenxOptimizerCallback.
    '''
    opt: LMConfig
    model: Model
    fom_log: ndarray
    start_guess: ndarray
    covar: ndarray
    # shared default callback set; replaced via set_callbacks()
    _callbacks: GenxOptimizerCallback=LMDefaultCallbacks()
    # number of figure-of-merit evaluations performed so far
    n_fom_evals=0

    def is_running(self):
        # The fit runs in a detached thread and no running flag is tracked,
        # so this always reports False.
        return False

    def __init__(self):
        GenxOptimizer.__init__(self)
        self.model=Model()
        # empty (0, 2) float log array; rows are appended elsewhere --
        # presumably (step, fom) pairs, TODO confirm
        self.fom_log=array([[0, 0]])[0:0]
        self.covar=None

    def pickle_string(self, clear_evals: bool = False):
        """Serialize this optimizer to a pickle byte string.

        ``clear_evals`` is accepted for interface compatibility but unused.
        """
        return pickle.dumps(self)

    def pickle_load(self, pickled_string: bytes):
        """Deserialize optimizer state from a pickle byte string."""
        obj=pickle.loads(pickled_string, encoding='latin1', errors='ignore')
        # TODO: set own options from object
        # NOTE(review): the unpickled object is currently discarded, so no
        # state is actually restored here.

    def get_start_guess(self):
        """Return the start parameter vector taken from the model."""
        return self.start_guess

    def get_model(self) -> Model:
        """Return the currently connected model."""
        return self.model

    def get_fom_log(self):
        """Return the logged figure-of-merit history array."""
        return self.fom_log

    def connect_model(self, model_obj: Model):
        '''
        Connects the model [model] to this object. Retrieves the functions
        that set the variables and stores a reference to the model.
        '''
        # Retrieve parameters from the model (bounds are ignored by leastsq)
        (param_funcs, start_guess, par_min, par_max)=model_obj.get_fit_pars(use_bounds=False)
        # Control parameter setup
        self.par_funcs=param_funcs
        self.model=model_obj
        self.n_dim=len(param_funcs)
        self.start_guess=start_guess

    def calc_sim(self, vec):
        ''' calc_sim(self, vec) --> fom

        Evaluate the simulation for the parameter values in vec and
        return the resulting figure of merit.
        '''
        # Set the parameter values via the per-parameter setter functions
        # (the original also kept an unused local alias of self.model here)
        list(map(lambda func, value: func(value), self.par_funcs, vec))
        self.model.evaluate_sim_func()
        return self.model.fom

    def calc_fom(self, vec):
        '''
        Calculate the residual vector for parameter vector vec.

        Raises OptimizerInterrupted when a stop has been requested.
        '''
        if self._stop_fit:
            raise OptimizerInterrupted("interrupted")
        # Set the parameter values
        list(map(lambda func, value: func(value), self.par_funcs, vec))
        fom=self.model.evaluate_fit_func(get_elements=True)  # fom is squared in leastsq
        # signed square root preserves the residual sign while leastsq
        # minimizes the sum of squares
        chi=sign(fom)*sqrt(abs(fom))
        self.n_fom_evals+=1
        return chi

    def calc_error_bar(self, index: int) -> (float, float):
        """Return the (negative, positive) error bar for parameter ``index``
        from the scaled covariance matrix; raises ErrorBarError when no
        covariance is available."""
        if self.covar is None:
            raise ErrorBarError("Could not get covariance matrix from fit, maybe the parameters are coupled/have no influence?")
        err=sqrt(self.covar[index, index])
        return -err, err

    def project_evals(self, index: int):
        # -> (ArrayLike, ArrayLike)
        # Not supported by this optimizer.
        pass

    def start_fit(self, model: Model):
        """Connect ``model`` and launch the fit in a background thread."""
        self.n_fom_evals=0
        self.connect_model(model)
        self._stop_fit=False
        _thread.start_new_thread(self.optimize, ())

    def optimize(self):
        """Run leastsq; on completion scale the covariance by the residual
        variance and notify the callbacks."""
        try:
            res=leastsq(self.calc_fom, self.start_guess, full_output=True)
        except OptimizerInterrupted:
            # stop_fit() was requested; report an interrupted result
            self._callbacks.fitting_ended(self.get_result_info(interrupted=True))
            return
        self.best_vec=res[0]
        if res[1] is None:
            # leastsq could not estimate the covariance (singular Jacobian)
            self.covar=None
        else:
            Chi2Res = self.calc_fom(self.best_vec)**2
            s_sq = Chi2Res.sum()/(len(Chi2Res)-len(res[0]))  # variance of the residuals
            self.covar=res[1]*s_sq
        self.plot_output()
        self._callbacks.fitting_ended(self.get_result_info())

    def stop_fit(self):
        """Request fit termination; takes effect on the next calc_fom call."""
        self._stop_fit=True

    def resume_fit(self, model: Model):
        # Resuming is not supported by this optimizer.
        pass

    def is_fitted(self):
        """True once at least one figure-of-merit evaluation has happened."""
        return self.n_fom_evals>0

    def is_configured(self) -> bool:
        # NOTE(review): annotated -> bool but returns None; callers appear
        # to tolerate this -- confirm before tightening.
        pass

    def set_callbacks(self, callbacks: GenxOptimizerCallback):
        """Install the callback set used for progress/result notification."""
        self._callbacks=callbacks

    def get_callbacks(self) -> 'GenxOptimizerCallback':
        """Return the currently installed callback set."""
        return self._callbacks

    def plot_output(self):
        """Re-evaluate the simulation at the best vector and push an update
        to the plot callback."""
        self.calc_sim(self.best_vec)
        data=SolverUpdateInfo(
            fom_value=self.model.fom,
            fom_name=self.model.fom_func.__name__,
            fom_log=self.get_fom_log(),
            new_best=True,
            data=self.model.data
        )
        self._callbacks.plot_output(data)

    def get_result_info(self, interrupted=False):
        """Assemble a SolverResultInfo for the finished (or interrupted) fit.

        NOTE(review): when interrupted before the first leastsq result,
        self.best_vec may not exist yet -- confirm callers guard this.
        """
        result = SolverResultInfo(
            start_guess=self.start_guess.copy(),
            error_message="",
            values=self.best_vec.copy(),
            new_best=not interrupted,
            population=[],
            max_val=[],
            min_val=[],
            fitting=False
        )
        return result
|
PypiClean
|
/PyZSI3-2.2.tar.gz/PyZSI3-2.2/ZSI/twisted/interfaces.py
|
import sys, warnings
# twisted & related imports
from zope.interface import classProvides, implements, Interface
# ZSI imports
from ZSI import EvaluateException, ParseException, ParsedSoap, SoapWriter
#
# Stability: Unstable
#
def CheckInputArgs(*interfaces):
    """Decorator factory that type-checks positional arguments against
    zope-style interfaces (anything with a ``providedBy`` predicate).

    Must provide at least one interface; the last one may be repeated, i.e.
    arguments beyond the number of interfaces are checked against the last
    interface. Raises TypeError for the first argument that matches neither
    its positional interface nor the last one.
    """
    l = len(interfaces)
    def wrapper(func):
        def check_args(self, *args, **kw):
            for i in range(len(args)):
                if (l > i and interfaces[i].providedBy(args[i])) or interfaces[-1].providedBy(args[i]):
                    continue
                if l > i:
                    raise TypeError('arg %s does not implement %s' % (args[i], interfaces[i]))
                raise TypeError('arg %s does not implement %s' % (args[i], interfaces[-1]))
            # BUG FIX: propagate the wrapped function's return value; the
            # original discarded it, which only worked for __init__ methods.
            return func(self, *args, **kw)
        return check_args
    return wrapper
class HandlerChainInterface(Interface):
    """Interface for middleware links of a handler chain: transforms the
    request on the way in and the response on the way out."""
    def processRequest(self, input, **kw):
        """returns a representation of the request; the
        last link in the chain must return a response
        pyobj with a typecode attribute.
        Parameters:
            input -- request data passed along the chain
        Keyword Parameters:
            request -- HTTPRequest instance
            resource -- Resource instance
        """
    def processResponse(self, output, **kw):
        """returns a string representing the soap response.
        Parameters:
            output -- response data passed along the chain
        Keyword Parameters:
            request -- HTTPRequest instance
            resource -- Resource instance
        """
class CallbackChainInterface(Interface):
    """Interface for the terminal callback of a handler chain: turns the
    processed request into a response object."""
    def processRequest(self, input, **kw):
        """returns a response pyobj with a typecode
        attribute.
        Parameters:
            input -- processed request data
        Keyword Parameters:
            request -- HTTPRequest instance
            resource -- Resource instance
        """
class DataHandler:
    """
    Handler link that parses raw SOAP input and serializes responses.

    class variables:
        readerClass -- factory class to create reader for ParsedSoap instances.
        writerClass -- ElementProxy implementation to use for SoapWriter instances.
    """
    # declare (at class level) that this class provides HandlerChainInterface
    classProvides(HandlerChainInterface)
    readerClass = None
    writerClass = None

    @classmethod
    def processRequest(cls, input, **kw):
        # parse the raw request into a ParsedSoap using the configured reader
        return ParsedSoap(input, readerclass=cls.readerClass)

    @classmethod
    def processResponse(cls, output, **kw):
        # serialize the response pyobj; the chain str()'s the SoapWriter later
        sw = SoapWriter(outputclass=cls.writerClass)
        sw.serialize(output)
        return sw
class DefaultHandlerChain:
    """Run input through each handler in order, dispatch the result to the
    terminal callback, and unwind handlers again for the response."""

    @CheckInputArgs(CallbackChainInterface, HandlerChainInterface)
    def __init__(self, cb, *handlers):
        self.handlercb = cb
        self.handlers = handlers

    def processRequest(self, arg, **kw):
        """Thread ``arg`` through every handler, then hand the result to
        the callback and return its response pyobj."""
        current = arg
        for handler in self.handlers:
            current = handler.processRequest(current, **kw)
        return self.handlercb.processRequest(current, **kw)

    def processResponse(self, arg, **kw):
        """Thread ``arg`` through every handler and return the final result
        as a string; a None response short-circuits to None."""
        if arg is None:
            return
        current = arg
        for handler in self.handlers:
            current = handler.processResponse(current, **kw)
        return str(current)
|
PypiClean
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.